diff --git a/.markdownlint.json b/.markdownlint.json new file mode 100644 index 00000000..ffe10de6 --- /dev/null +++ b/.markdownlint.json @@ -0,0 +1,6 @@ +{ + "MD013": { + "code_blocks": false, + "tables": false + } +} diff --git a/Cargo.lock b/Cargo.lock index b389f80c..fc3c2d95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -45,6 +45,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "annotate-snippets" version = "0.10.2" @@ -107,15 +113,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.100" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" [[package]] name = "ar_archive_writer" -version = "0.2.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c269894b6fe5e9d7ada0cf69b5bf847ff35bc25fc271f08e1d080fce80339a" +checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b" dependencies = [ "object", ] @@ -254,9 +260,15 @@ checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" [[package]] name = "bitflags" -version = "2.10.0" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" [[package]] name = "block-buffer" @@ -269,9 +281,9 @@ dependencies = [ [[package]] name = "bon" -version = "3.8.2" +version = 
"3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234655ec178edd82b891e262ea7cf71f6584bcd09eff94db786be23f1821825c" +checksum = "2d13a61f2963b88eef9c1be03df65d42f6996dfeac1054870d950fcf66686f83" dependencies = [ "bon-macros", "rustversion", @@ -279,9 +291,9 @@ dependencies = [ [[package]] name = "bon-macros" -version = "3.8.2" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ec27229c38ed0eb3c0feee3d2c1d6a4379ae44f418a29a658890e062d8f365" +checksum = "d314cc62af2b6b0c65780555abb4d02a03dd3b799cd42419044f0c38d99738c0" dependencies = [ "darling", "ident_case", @@ -304,9 +316,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.1" +version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" [[package]] name = "bytecount" @@ -322,15 +334,21 @@ checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" [[package]] name = "bytes" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.53" +version = "1.2.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" dependencies = [ "find-msvc-tools", "shlex", @@ -350,9 +368,9 @@ checksum = 
"613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.43" +version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" dependencies = [ "iana-time-zone", "js-sys", @@ -361,11 +379,38 @@ dependencies = [ "windows-link", ] +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "clap" -version = "4.5.54" +version = "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394" +checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a" dependencies = [ "clap_builder", "clap_derive", @@ -373,9 +418,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.54" +version = "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00" +checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876" dependencies = [ "anstream", "anstyle", @@ -385,18 +430,18 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.65" +version = "4.5.66" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "430b4dc2b5e3861848de79627b2bedc9f3342c7da5173a14eaa5d0f8dc18ae5d" +checksum = "c757a3b7e39161a4e56f9365141ada2a6c915a8622c408ab6bb4b5d047371031" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.49" +version = "4.5.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" +checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" dependencies = [ "heck", "proc-macro2", @@ -406,9 +451,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.7" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" +checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" [[package]] name = "colorchoice" @@ -448,6 +493,15 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "core-foundation" version = "0.10.1" @@ -488,6 +542,51 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "criterion" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-deque" version = "0.8.6" @@ -513,6 +612,12 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + [[package]] name = "crypto-common" version = "0.1.7" @@ -557,6 +662,20 @@ dependencies = [ "syn", ] +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "deadpool" version = "0.12.3" @@ -575,13 +694,35 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" +[[package]] +name = "derive_more" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" +dependencies = [ + "derive_more-impl 1.0.0", +] + [[package]] name = "derive_more" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" dependencies = [ - "derive_more-impl", + "derive_more-impl 2.1.1", +] + +[[package]] +name = 
"derive_more-impl" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "syn", + "unicode-xid", ] [[package]] @@ -794,26 +935,41 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "find-msvc-tools" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" [[package]] name = "flate2" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b375d6465b98090a5f25b1c7703f3859783755aa9a80433b36e0379a3ec2f369" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" dependencies = [ "crc32fast", "miniz_oxide", ] +[[package]] +name = "fluent-uri" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "foldhash" version = "0.2.0" @@ -831,9 +987,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = 
"8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -846,9 +1002,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -856,15 +1012,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", @@ -873,15 +1029,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", @@ -890,15 +1046,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-timer" @@ -908,9 +1064,9 @@ checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-channel", "futures-core", @@ -920,7 +1076,6 @@ dependencies = [ "futures-task", "memchr", "pin-project-lite", - "pin-utils", "slab", ] @@ -957,6 +1112,19 @@ dependencies = [ "wasip2", ] +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + [[package]] name = "glob" version = "0.3.3" @@ -1018,7 +1186,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" dependencies = [ "atomic-waker", - "bytes 1.11.0", + "bytes 1.11.1", "fnv", "futures-core", "futures-sink", @@ -1030,6 +1198,17 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" 
+dependencies = [ + "cfg-if", + "crunchy", + "zerocopy", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -1046,6 +1225,15 @@ dependencies = [ "allocator-api2", ] +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash 0.1.5", +] + [[package]] name = "hashbrown" version = "0.16.1" @@ -1054,7 +1242,7 @@ checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" dependencies = [ "allocator-api2", "equivalent", - "foldhash", + "foldhash 0.2.0", ] [[package]] @@ -1099,7 +1287,7 @@ version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "fnv", "itoa", ] @@ -1110,7 +1298,7 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "itoa", ] @@ -1120,7 +1308,7 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "http 1.4.0", ] @@ -1130,7 +1318,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "futures-core", "http 1.4.0", "http-body", @@ -1156,7 +1344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ "atomic-waker", - "bytes 1.11.0", + "bytes 1.11.1", "futures-channel", "futures-core", "h2", @@ -1206,14 +1394,13 @@ 
dependencies = [ [[package]] name = "hyper-util" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ "base64 0.22.1", - "bytes 1.11.0", + "bytes 1.11.1", "futures-channel", - "futures-core", "futures-util", "http 1.4.0", "http-body", @@ -1230,9 +1417,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.64" +version = "0.1.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -1333,6 +1520,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "ident_case" version = "1.0.1" @@ -1406,9 +1599,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.46.1" +version = "1.46.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248b42847813a1550dafd15296fd9748c651d0c32194559dbc05d804d54b21e8" +checksum = "e82db8c87c7f1ccecb34ce0c24399b8a73081427f3c7c50a5d597925356115e4" dependencies = [ "console", "once_cell", @@ -1432,12 +1625,32 @@ dependencies = [ "serde", ] +[[package]] +name = "is-terminal" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.61.2", +] + [[package]] name = "is_terminal_polyfill" version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.13.0" @@ -1464,9 +1677,9 @@ checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "jiff" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50" +checksum = "b3e3d65f018c6ae946ab16e80944b97096ed73c35b221d1c478a6c81d8f57940" dependencies = [ "jiff-static", "log", @@ -1477,9 +1690,9 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0c84ee7f197eca9a86c6fd6cb771e55eb991632f15f2bc3ca6ec838929e6e78" +checksum = "a17c2b211d863c7fde02cbea8a3c1a439b98e109286554f2860bdded7ff83818" dependencies = [ "proc-macro2", "quote", @@ -1547,6 +1760,9 @@ dependencies = [ "indoc", "insta", "jrsonnet-rowan-parser", + "rstest 0.23.0", + "serde", + "serde_json", "tempfile", "thiserror 1.0.69", ] @@ -1572,21 +1788,207 @@ dependencies = [ ] [[package]] -name = "jrsonnet-interner" +name = "jrsonnet-interner" +version = "0.5.0-pre97" +dependencies = [ + "hashbrown 0.14.5", + "jrsonnet-gcmodule", + "rustc-hash", +] + +[[package]] +name = "jrsonnet-lint" +version = "0.5.0-pre97" +dependencies = [ + "clap", + "jrsonnet-rowan-parser", + "rowan", + "thiserror 1.0.69", +] + +[[package]] +name = "jrsonnet-lsp" +version = "0.5.0-pre97" +dependencies = [ + "anyhow", + "assert_matches", + "criterion", + "crossbeam-channel", + "jrsonnet-evaluator", + "jrsonnet-lsp-check", + "jrsonnet-lsp-document", + "jrsonnet-lsp-handlers", + "jrsonnet-lsp-import", + "jrsonnet-lsp-inference", + 
"jrsonnet-lsp-scenario", + "jrsonnet-lsp-scope", + "jrsonnet-lsp-types", + "jrsonnet-parser", + "jrsonnet-rowan-parser", + "jrsonnet-stdlib", + "lsp-server", + "lsp-types", + "parking_lot", + "rayon", + "rstest 0.23.0", + "rustc-hash", + "serde", + "serde_json", + "tempfile", + "thiserror 1.0.69", + "tracing", +] + +[[package]] +name = "jrsonnet-lsp-bin" +version = "0.5.0-pre97" +dependencies = [ + "anyhow", + "clap", + "jrsonnet-lsp", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "jrsonnet-lsp-check" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "jrsonnet-lsp-document", + "jrsonnet-lsp-inference", + "jrsonnet-lsp-scope", + "jrsonnet-lsp-stdlib", + "jrsonnet-lsp-types", + "jrsonnet-rowan-parser", + "lsp-types", + "rowan", + "rstest 0.23.0", + "rustc-hash", +] + +[[package]] +name = "jrsonnet-lsp-document" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "derive_more 1.0.0", + "jrsonnet-rowan-parser", + "lsp-types", + "parking_lot", + "rowan", + "thiserror 1.0.69", + "url", +] + +[[package]] +name = "jrsonnet-lsp-handlers" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "indoc", + "jrsonnet-fmt", + "jrsonnet-lsp-document", + "jrsonnet-lsp-import", + "jrsonnet-lsp-inference", + "jrsonnet-lsp-scope", + "jrsonnet-lsp-stdlib", + "jrsonnet-lsp-types", + "jrsonnet-rowan-parser", + "lsp-types", + "rayon", + "rowan", + "rstest 0.23.0", + "serde", + "serde_json", + "strum", + "tempfile", + "thiserror 1.0.69", + "tracing", +] + +[[package]] +name = "jrsonnet-lsp-import" +version = "0.5.0-pre97" +dependencies = [ + "criterion", + "jrsonnet-lsp-document", + "jrsonnet-rowan-parser", + "rayon", + "rowan", + "rustc-hash", + "tempfile", +] + +[[package]] +name = "jrsonnet-lsp-inference" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "dashmap", + "jrsonnet-lsp-document", + "jrsonnet-lsp-import", + "jrsonnet-lsp-scope", + "jrsonnet-lsp-stdlib", + "jrsonnet-lsp-types", + "jrsonnet-rowan-parser", + 
"jrsonnet-std-sig", + "lru", + "lsp-types", + "moka", + "parking_lot", + "rayon", + "rowan", + "rstest 0.23.0", + "rustc-hash", + "strsim", + "tracing", +] + +[[package]] +name = "jrsonnet-lsp-scenario" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "crossbeam-channel", + "jrsonnet-lsp-types", + "lsp-server", + "lsp-types", + "rowan", + "serde", + "serde_json", + "serde_yaml_with_quirks", + "tempfile", + "thiserror 1.0.69", +] + +[[package]] +name = "jrsonnet-lsp-scope" version = "0.5.0-pre97" dependencies = [ - "hashbrown 0.14.5", - "jrsonnet-gcmodule", + "jrsonnet-lsp-document", + "jrsonnet-rowan-parser", + "rowan", "rustc-hash", ] [[package]] -name = "jrsonnet-lint" +name = "jrsonnet-lsp-stdlib" version = "0.5.0-pre97" dependencies = [ - "clap", + "indoc", + "jrsonnet-lsp-types", + "jrsonnet-std-sig", +] + +[[package]] +name = "jrsonnet-lsp-types" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", "jrsonnet-rowan-parser", - "rowan", + "rstest 0.23.0", + "rustc-hash", "thiserror 1.0.69", ] @@ -1618,11 +2020,19 @@ dependencies = [ "drop_bomb", "indoc", "insta", + "jrsonnet-evaluator", "logos", "rowan", "thiserror 1.0.69", ] +[[package]] +name = "jrsonnet-std-sig" +version = "0.5.0-pre97" +dependencies = [ + "jrsonnet-rowan-parser", +] + [[package]] name = "jrsonnet-stdlib" version = "0.5.0-pre97" @@ -1656,9 +2066,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.85" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" +checksum = "14dc6f6450b3f6d4ed5b16327f38fed626d375a886159ca555bd7822c0c3a5a6" dependencies = [ "once_cell", "wasm-bindgen", @@ -1715,7 +2125,7 @@ name = "k8s" version = "0.1.0" dependencies = [ "phf", - "rstest", + "rstest 0.26.1", "serde", "serde_json", "thiserror 1.0.69", @@ -1750,18 +2160,18 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.5" +version = "0.1.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +checksum = "cb26cec98cce3a3d96cbb7bced3c4b16e3d13f27ec56dbd62cbc8f39cfb9d653" dependencies = [ "cpufeatures", ] [[package]] name = "kube" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dae7229247e4215781e5c5104a056e1e2163943e577f9084cf8bba7b5248f7a" +checksum = "f96b537b4c4f61fc183594edbecbbefa3037e403feac0701bb24e6eff78e0034" dependencies = [ "k8s-openapi", "kube-client", @@ -1772,12 +2182,12 @@ dependencies = [ [[package]] name = "kube-client" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "010875e291a9c0a4e076f4f9c35b97d82fd2372cb3bc713252c3d08b7e73ce5b" +checksum = "af97b8b696eb737e5694f087c498ca725b172c2a5bc3a6916328d160225537ee" dependencies = [ "base64 0.22.1", - "bytes 1.11.0", + "bytes 1.11.1", "either", "futures", "http 1.4.0", @@ -1807,11 +2217,11 @@ dependencies = [ [[package]] name = "kube-core" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ac76281aa698dd34111e25b21f5f6561932a30feabab5357152be273f8a81bb" +checksum = "e7aeade7d2e9f165f96b3c1749ff01a8e2dc7ea954bd333bcfcecc37d5226bdd" dependencies = [ - "derive_more", + "derive_more 2.1.1", "form_urlencoded", "http 1.4.0", "jiff", @@ -1826,9 +2236,9 @@ dependencies = [ [[package]] name = "kube-derive" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "599c09721efcccc0e6a26e93df28c587da60ff5e099c657626fff2af0ae4cbb8" +checksum = "c98f59f4e68864624a0b993a1cc2424439ab7238eaede5c299e89943e2a093ff" dependencies = [ "darling", "proc-macro2", @@ -1840,9 +2250,9 @@ dependencies = [ [[package]] name = "kube-runtime" -version = "3.0.0" +version = "3.0.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db43d26700f564baf850f681f3cb0f1195d2699bd379bfa70750ecec4dcb209" +checksum = "fc158473d6d86ec22692874bd5ddccf07474eab5c6bb41f226c522e945da5244" dependencies = [ "ahash", "async-broadcast", @@ -1871,11 +2281,17 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "libc" -version = "0.2.180" +version = "0.2.182" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" [[package]] name = "libjsonnet" @@ -1894,7 +2310,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ - "bitflags", + "bitflags 2.11.0", "libc", ] @@ -1906,9 +2322,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" [[package]] name = "litemap" @@ -1973,6 +2389,32 @@ dependencies = [ "hashbrown 0.16.1", ] +[[package]] +name = "lsp-server" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d6ada348dbc2703cbe7637b2dda05cff84d3da2819c24abcb305dd613e0ba2e" +dependencies = [ + "crossbeam-channel", + "log", + "serde", + "serde_derive", + "serde_json", +] + 
+[[package]] +name = "lsp-types" +version = "0.97.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071" +dependencies = [ + "bitflags 1.3.2", + "fluent-uri", + "serde", + "serde_json", + "serde_repr", +] + [[package]] name = "matchers" version = "0.2.0" @@ -1990,9 +2432,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.7.6" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" [[package]] name = "memoffset" @@ -2073,13 +2515,30 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "moka" +version = "0.12.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ac832c50ced444ef6be0767a008b02c106a909ba79d1d830501e94b96f6b7e" +dependencies = [ + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "equivalent", + "parking_lot", + "portable-atomic", + "smallvec 1.15.1", + "tagptr", + "uuid", +] + [[package]] name = "nix" version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags", + "bitflags 2.11.0", "cfg-if", "cfg_aliases", "libc", @@ -2168,9 +2627,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.2" +version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "memchr", ] @@ -2187,6 +2646,12 @@ version = "1.70.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + [[package]] name = "openssl-probe" version = "0.2.1" @@ -2214,7 +2679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d" dependencies = [ "async-trait", - "bytes 1.11.0", + "bytes 1.11.1", "http 1.4.0", "opentelemetry", "reqwest", @@ -2381,9 +2846,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7" +checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662" dependencies = [ "memchr", "ucd-trie", @@ -2391,9 +2856,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f9dbced329c441fa79d80472764b1a2c7e57123553b8519b36663a2fb234ed" +checksum = "11f486f1ea21e6c10ed15d5a7c77165d0ee443402f0780849d1768e7d9d6fe77" dependencies = [ "pest", "pest_generator", @@ -2401,9 +2866,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb96d5051a78f44f43c8f712d8e810adb0ebf923fc9ed2655a7f66f63ba8ee5" +checksum = "8040c4647b13b210a963c1ed407c1ff4fdfa01c31d6d2a098218702e6664f94f" dependencies = [ "pest", "pest_meta", @@ -2414,9 +2879,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.8.5" +version = "2.8.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "602113b5b5e8621770cfd490cfd90b9f84ab29bd2b0e49ad83eb6d186cef2365" +checksum = "89815c69d36021a140146f26659a81d6c2afa33d216d736dd4be5381a7362220" dependencies = [ "pest", "sha2", @@ -2467,18 +2932,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +checksum = "f1749c7ed4bcaf4c3d0a3efc28538844fb29bcdd7d2b67b2be7e20ba861ff517" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +checksum = "d9b20ed30f105399776b9c883e68e536ef602a16ae6f596d2c473591d6ad64c6" dependencies = [ "proc-macro2", "quote", @@ -2487,9 +2952,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" [[package]] name = "pin-utils" @@ -2497,17 +2962,45 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + [[package]] name = "portable-atomic" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" [[package]] name = "portable-atomic-util" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +checksum = "7a9db96d7fa8782dd8c15ce32ffe8680bbd1e978a43bf51a34d39483540495f5" dependencies = [ "portable-atomic", ] @@ -2551,9 +3044,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.105" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" dependencies = [ "unicode-ident", ] @@ -2564,7 +3057,7 @@ version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "prost-derive", ] @@ -2583,9 +3076,9 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d11f2fedc3b7dafdc2851bc52f277377c5473d378859be234bc7ebb593144d01" +checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8" dependencies = [ "ar_archive_writer", "cc", @@ -2593,9 +3086,9 @@ dependencies = [ 
[[package]] name = "quote" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" dependencies = [ "proc-macro2", ] @@ -2709,7 +3202,7 @@ version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags", + "bitflags 2.11.0", ] [[package]] @@ -2745,9 +3238,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.12.2" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", @@ -2757,9 +3250,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" dependencies = [ "aho-corasick", "memchr", @@ -2768,9 +3261,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" [[package]] name = "relative-path" @@ -2785,7 +3278,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" dependencies = [ "base64 0.22.1", - "bytes 1.11.0", + "bytes 1.11.1", "futures-channel", "futures-core", "futures-util", @@ -2839,6 
+3332,18 @@ dependencies = [ "text-size", ] +[[package]] +name = "rstest" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a2c585be59b6b5dd66a9d2084aa1d8bd52fbdb806eafdeffb52791147862035" +dependencies = [ + "futures", + "futures-timer", + "rstest_macros 0.23.0", + "rustc_version", +] + [[package]] name = "rstest" version = "0.26.1" @@ -2847,7 +3352,25 @@ checksum = "f5a3193c063baaa2a95a33f03035c8a72b83d97a54916055ba22d35ed3839d49" dependencies = [ "futures-timer", "futures-util", - "rstest_macros", + "rstest_macros 0.26.1", +] + +[[package]] +name = "rstest_macros" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "825ea780781b15345a146be27eaefb05085e337e869bff01b4306a4fd4a9ad5a" +dependencies = [ + "cfg-if", + "glob", + "proc-macro-crate", + "proc-macro2", + "quote", + "regex", + "relative-path", + "rustc_version", + "syn", + "unicode-ident", ] [[package]] @@ -2875,7 +3398,7 @@ dependencies = [ "anyhow", "assert_matches", "bon", - "bytes 1.11.0", + "bytes 1.11.1", "clap", "dirs", "gtmpl", @@ -2901,7 +3424,7 @@ dependencies = [ "patch", "rayon", "regex", - "rstest", + "rstest 0.26.1", "rtk-diff", "serde", "serde-saphyr", @@ -2929,7 +3452,7 @@ dependencies = [ "anyhow", "nu-ansi-term", "patch", - "rstest", + "rstest 0.26.1", "serde_json", "similar", "syntect", @@ -2954,11 +3477,11 @@ dependencies = [ [[package]] name = "rustix" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ - "bitflags", + "bitflags 2.11.0", "errno", "libc", "linux-raw-sys", @@ -2967,9 +3490,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.36" +version = "0.23.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" dependencies = [ "log", "once_cell", @@ -3020,9 +3543,9 @@ checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" [[package]] name = "same-file" @@ -3053,9 +3576,9 @@ dependencies = [ [[package]] name = "schemars" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2" +checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc" dependencies = [ "dyn-clone", "ref-cast", @@ -3066,9 +3589,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4908ad288c5035a8eb12cfdf0d49270def0a268ee162b75eeee0f85d155a7c45" +checksum = "7d115b50f4aaeea07e79c1912f645c7513d81715d0420f8bc77a18c6260b307f" dependencies = [ "proc-macro2", "quote", @@ -3093,11 +3616,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "3.5.1" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" dependencies = [ - "bitflags", + "bitflags 2.11.0", "core-foundation", "core-foundation-sys", "libc", @@ -3106,9 +3629,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.15.0" +version = "2.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" dependencies = [ "core-foundation-sys", "libc", @@ -3202,6 +3725,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "serde_spanned" version = "0.6.9" @@ -3325,9 +3859,9 @@ checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" [[package]] name = "slab" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" [[package]] name = "smallvec" @@ -3343,9 +3877,9 @@ checksum = "ef784004ca8777809dcdad6ac37629f0a97caee4c685fcea805278d81dd8b857" [[package]] name = "socket2" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" dependencies = [ "libc", "windows-sys 0.60.2", @@ -3359,9 +3893,9 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stacker" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" +checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013" dependencies = [ "cc", "cfg-if", @@ -3382,6 +3916,28 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + [[package]] name = "subtle" version = "2.6.1" @@ -3390,9 +3946,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.114" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -3457,14 +4013,20 @@ dependencies = [ "unicode-width 0.2.2", ] +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + [[package]] name = "tempfile" -version = "3.24.0" +version = "3.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +checksum = "82a72c767771b47409d2345987fda8628641887d5466101319899796367354a0" dependencies = [ "fastrand", - "getrandom 0.3.4", + "getrandom 0.4.1", "once_cell", "rustix", "windows-sys 0.61.2", @@ -3547,6 +4109,16 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" 
+dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tk-compare" version = "0.1.0" @@ -3561,7 +4133,7 @@ dependencies = [ "k8s-mock", "k8s-openapi", "regex", - "rstest", + "rstest 0.26.1", "rtk-diff", "serde", "serde_json", @@ -3580,7 +4152,7 @@ version = "1.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "libc", "mio", "pin-project-lite", @@ -3639,7 +4211,7 @@ version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "futures-core", "futures-sink", "pin-project-lite", @@ -3705,9 +4277,9 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.6+spec-1.1.0" +version = "1.0.9+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" dependencies = [ "winnow", ] @@ -3720,13 +4292,13 @@ checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "tonic" -version = "0.14.2" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203" +checksum = "fec7c61a0695dc1887c1b53952990f3ad2e3a31453e1f49f10e75424943a93ec" dependencies = [ "async-trait", "base64 0.22.1", - "bytes 1.11.0", + "bytes 1.11.1", "http 1.4.0", "http-body", "http-body-util", @@ -3746,11 +4318,11 @@ dependencies = [ [[package]] name = "tonic-prost" -version = "0.14.2" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66bd50ad6ce1252d87ef024b3d64fe4c3cf54a86fb9ef4c631fdd0ded7aeaa67" +checksum = 
"a55376a0bbaa4975a3f10d009ad763d8f4108f067c7c2e74f3001fb49778d309" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "prost", "tonic", ] @@ -3781,8 +4353,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ "base64 0.22.1", - "bitflags", - "bytes 1.11.0", + "bitflags 2.11.0", + "bytes 1.11.1", "futures-util", "http 1.4.0", "http-body", @@ -3951,9 +4523,15 @@ checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f" [[package]] name = "unicode-ident" -version = "1.0.22" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" @@ -3967,6 +4545,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -4009,6 +4593,17 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" +dependencies = [ + "getrandom 0.4.1", + "js-sys", + "wasm-bindgen", +] + 
[[package]] name = "valuable" version = "0.1.1" @@ -4055,11 +4650,20 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "wasm-bindgen" -version = "0.2.108" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +checksum = "60722a937f594b7fde9adb894d7c092fc1bb6612897c46368d18e7a20208eff2" dependencies = [ "cfg-if", "once_cell", @@ -4070,9 +4674,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.58" +version = "0.4.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" +checksum = "8a89f4650b770e4521aa6573724e2aed4704372151bd0de9d16a3bbabb87441a" dependencies = [ "cfg-if", "futures-util", @@ -4084,9 +4688,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.108" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" +checksum = "0fac8c6395094b6b91c4af293f4c79371c163f9a6f56184d2c9a85f5a95f3950" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4094,9 +4698,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.108" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" +checksum = "ab3fabce6159dc20728033842636887e4877688ae94382766e00b180abac9d60" dependencies = [ "bumpalo", "proc-macro2", @@ -4107,18 +4711,52 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.108" +version = 
"0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +checksum = "de0e091bdb824da87dc01d967388880d017a0a9bc4f3bdc0d86ee9f9336e3bb5" dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap 2.13.0", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "semver", +] + [[package]] name = "web-sys" -version = "0.3.85" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" +checksum = "705eceb4ce901230f8625bd1d665128056ccbe4b7408faa625eec1ba80f59a97" dependencies = [ "js-sys", "wasm-bindgen", @@ -4136,9 +4774,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" +checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed" dependencies = [ "rustls-pki-types", ] @@ -4430,6 +5068,88 @@ name = "wit-bindgen" version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap 2.13.0", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.11.0", + "indexmap 2.13.0", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.13.0", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] [[package]] name = "writeable" @@ -4500,18 +5220,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.33" +version = "0.8.40" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" +checksum = "a789c6e490b576db9f7e6b6d661bcc9799f7c0ac8352f56ea20193b2681532e5" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.33" +version = "0.8.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" +checksum = "f65c489a7071a749c849713807783f70672b28094011623e200cb86dcb835953" dependencies = [ "proc-macro2", "quote", @@ -4580,6 +5300,6 @@ dependencies = [ [[package]] name = "zmij" -version = "1.0.16" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfcd145825aace48cff44a8844de64bf75feec3080e0aa5cdbde72961ae51a65" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/Cargo.toml b/Cargo.toml index e2828cb2..87f44e42 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] members = ["crates/*", "bindings/jsonnet", "cmds/*", "tests", "xtask"] -default-members = ["cmds/jrsonnet"] +default-members = ["cmds/jrsonnet", "cmds/jrsonnet-lsp"] resolver = "2" [workspace.package] @@ -83,6 +83,11 @@ indexmap = "2.2.3" itertools = "0.13.0" xshell = "0.2.6" +lsp-server = "0.7.6" +lsp-types = "0.97.0" +parking_lot = "0.12.5" +url = "2.5.4" + regex = "1.10" lru = "0.16.0" @@ -152,19 +157,18 @@ similar_names = "allow" # Pre-existing issues in jrsonnet crates unnecessary_semicolon = "allow" too_long_first_doc_paragraph = "allow" -map_or_unwrap = "allow" -option_if_let_else = "allow" +map_unwrap_or = "warn" manual_repeat_n = "allow" mem_replace_option_with_some = "allow" unnecessary_literal_bound = "allow" -unnecessary_map_or = "allow" +unnecessary_map_or = "warn" collapsible_match = "allow" doc_markdown = "allow" items_after_statements = "allow" manual_ignore_case_cmp = "allow" manual_midpoint = "allow" 
needless_borrows_for_generic_args = "allow" -needless_continue = "allow" +needless_continue = "warn" ref_option = "allow" single_component_path_imports = "allow" too_many_lines = "allow" diff --git a/bindings/jsonnet/src/import.rs b/bindings/jsonnet/src/import.rs index 10473336..7c328a8d 100644 --- a/bindings/jsonnet/src/import.rs +++ b/bindings/jsonnet/src/import.rs @@ -65,8 +65,8 @@ impl ImportResolver for CallbackImportResolver { base.as_ptr(), rel.as_ptr(), &mut found_here.cast_const(), - &mut buf, - &mut buf_len, + &raw mut buf, + &raw mut buf_len, ) }; let buf_slice: &[u8] = unsafe { std::slice::from_raw_parts(buf.cast(), buf_len) }; diff --git a/bindings/jsonnet/src/native.rs b/bindings/jsonnet/src/native.rs index 3f56df87..0b462653 100644 --- a/bindings/jsonnet/src/native.rs +++ b/bindings/jsonnet/src/native.rs @@ -43,7 +43,7 @@ impl NativeCallbackHandler for JsonnetNativeCallbackHandler { } n_args.push(None); let mut success = 1; - let v = unsafe { (self.cb)(self.ctx, n_args.as_ptr().cast(), &mut success) }; + let v = unsafe { (self.cb)(self.ctx, n_args.as_ptr().cast(), &raw mut success) }; let v = unsafe { *Box::from_raw(v) }; if success == 1 { Ok(v) diff --git a/cmds/jrsonnet-fmt/Cargo.toml b/cmds/jrsonnet-fmt/Cargo.toml index 8257dbec..e0e8221f 100644 --- a/cmds/jrsonnet-fmt/Cargo.toml +++ b/cmds/jrsonnet-fmt/Cargo.toml @@ -15,6 +15,11 @@ jrsonnet-rowan-parser.workspace = true insta.workspace = true indoc.workspace = true hi-doc.workspace = true +serde = { workspace = true, features = ["derive"] } clap = { workspace = true, features = ["derive"] } tempfile.workspace = true thiserror.workspace = true + +[dev-dependencies] +rstest = "0.23" +serde_json.workspace = true diff --git a/cmds/jrsonnet-fmt/src/api.rs b/cmds/jrsonnet-fmt/src/api.rs new file mode 100644 index 00000000..59ef9722 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/api.rs @@ -0,0 +1,233 @@ +use dprint_core::formatting::{PrintItems, PrintOptions}; + +use crate::{FormatContext, FormatOptions, 
Printable}; + +const CONVERGENCE_LIMIT: usize = 10; + +/// A byte-range text edit produced by formatter helpers. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ByteRangeEdit { + pub range: std::ops::Range, + pub new_text: String, +} + +fn trim_trailing_whitespace(text: &str) -> String { + let mut trimmed = String::with_capacity(text.len()); + for segment in text.split_inclusive('\n') { + if let Some(line) = segment.strip_suffix('\n') { + trimmed.push_str(line.trim_end_matches([' ', '\t'])); + trimmed.push('\n'); + } else { + trimmed.push_str(segment.trim_end_matches([' ', '\t'])); + } + } + trimmed +} + +/// Format Jsonnet source code in-process. +/// +/// Applies repeated formatting passes until output stabilizes or the +/// convergence limit is reached. +/// Returns `None` when parsing fails. +#[must_use] +pub fn format_code(input: &str, opts: &FormatOptions) -> Option { + let mut iteration = 0; + let mut formatted = input.to_owned(); + + // https://github.com/dprint/dprint/pull/423 + loop { + let reformatted = format_once(&formatted, opts)?; + let convergence_tmp = reformatted.trim().to_owned(); + if formatted == convergence_tmp { + break; + } + formatted = convergence_tmp; + iteration += 1; + if iteration > CONVERGENCE_LIMIT { + return None; + } + } + + if opts.trim_trailing_whitespace { + formatted = trim_trailing_whitespace(&formatted); + } + formatted.push('\n'); + if opts.trim_final_newlines { + formatted.truncate(formatted.trim_end_matches('\n').len()); + } + Some(formatted) +} + +fn common_prefix_len(left: &str, right: &str) -> usize { + let mut prefix = 0; + let mut left_chars = left.char_indices(); + let mut right_chars = right.char_indices(); + + loop { + match (left_chars.next(), right_chars.next()) { + (Some((idx, left_char)), Some((_, right_char))) if left_char == right_char => { + prefix = idx + left_char.len_utf8(); + } + _ => return prefix, + } + } +} + +fn common_suffix_len(left: &str, right: &str) -> usize { + let mut suffix = 0; + let mut 
left_chars = left.char_indices().rev(); + let mut right_chars = right.char_indices().rev(); + + loop { + match (left_chars.next(), right_chars.next()) { + (Some((left_idx, left_char)), Some((right_idx, right_char))) + if left_char == right_char => + { + let left_suffix = left.len().saturating_sub(left_idx); + let right_suffix = right.len().saturating_sub(right_idx); + suffix = left_suffix.min(right_suffix); + } + _ => return suffix, + } + } +} + +fn minimal_change_ranges( + before: &str, + after: &str, +) -> Option<(std::ops::Range, std::ops::Range)> { + if before == after { + return None; + } + + let prefix = common_prefix_len(before, after); + let suffix = common_suffix_len(&before[prefix..], &after[prefix..]); + let before_end = before.len().saturating_sub(suffix); + let after_end = after.len().saturating_sub(suffix); + Some((prefix..before_end, prefix..after_end)) +} + +/// Format Jsonnet source and return a constrained range edit. +/// +/// This runs full-document formatting and computes the minimal changed byte +/// range. An edit is returned only when the formatter's changes are fully +/// inside `requested_range`. +/// +/// Returns `None` when parsing fails. 
+#[must_use] +pub fn format_code_range( + input: &str, + requested_range: std::ops::Range, + opts: &FormatOptions, +) -> Option> { + if requested_range.start > requested_range.end || requested_range.end > input.len() { + return Some(Vec::new()); + } + + let formatted = format_code(input, opts)?; + let Some((old_range, new_range)) = minimal_change_ranges(input, &formatted) else { + return Some(Vec::new()); + }; + + if old_range.start < requested_range.start || old_range.end > requested_range.end { + return Some(Vec::new()); + } + + Some(vec![ByteRangeEdit { + range: old_range, + new_text: formatted[new_range].to_string(), + }]) +} + +fn format_once(input: &str, opts: &FormatOptions) -> Option { + let (parsed, errors) = jrsonnet_rowan_parser::parse(input); + if !errors.is_empty() { + return None; + } + + let ctx = FormatContext::new(opts.clone()); + + Some(dprint_core::formatting::format( + || { + let mut out = PrintItems::new(); + parsed.print(&mut out, &ctx); + out + }, + PrintOptions { + indent_width: if opts.indent == 0 { 3 } else { opts.indent }, + max_width: 100, + use_tabs: opts.indent == 0, + new_line_text: "\n", + }, + )) +} + +#[cfg(test)] +mod tests { + use super::{ + format_code_range, minimal_change_ranges, trim_trailing_whitespace, ByteRangeEdit, + }; + + #[test] + fn test_trim_trailing_whitespace_removes_spaces_and_tabs_at_line_end() { + assert_eq!(trim_trailing_whitespace("a \n\tb\t \n c\t"), "a\n\tb\n c"); + } + + #[test] + fn test_format_code_can_trim_final_newlines() { + let options = crate::FormatOptions { + trim_final_newlines: true, + ..crate::FormatOptions::default() + }; + assert_eq!( + super::format_code("{a:1}", &options), + Some("{\n\ta: 1,\n}".to_string()) + ); + } + + #[test] + fn test_format_code_range_applies_edit_when_change_is_inside_range() { + let input = "{\n a: 1,\n b:2,\n}\n"; + let edits = format_code_range( + input, + 11..17, + &crate::FormatOptions { + indent: 2, + ..crate::FormatOptions::default() + }, + ) + 
.expect("format"); + assert_eq!( + edits, + vec![ByteRangeEdit { + range: 14..14, + new_text: " ".to_string(), + }] + ); + } + + #[test] + fn test_format_code_range_returns_no_edits_when_change_is_outside_range() { + let input = "{\n a:1,\n b:2,\n}\n"; + let edits = format_code_range( + input, + 0..9, + &crate::FormatOptions { + indent: 2, + ..crate::FormatOptions::default() + }, + ) + .expect("format"); + assert!(edits.is_empty()); + } + + #[test] + fn test_minimal_change_ranges_respects_utf8_boundaries() { + let before = "{x: 'é'}\n"; + let after = "{x: 'ê'}\n"; + let (old_range, new_range) = + minimal_change_ranges(before, after).expect("should detect change"); + assert_eq!(old_range, 5..7); + assert_eq!(&after[new_range], "ê"); + } +} diff --git a/cmds/jrsonnet-fmt/src/children.rs b/cmds/jrsonnet-fmt/src/children.rs index 2ecd4210..0e8a1a51 100644 --- a/cmds/jrsonnet-fmt/src/children.rs +++ b/cmds/jrsonnet-fmt/src/children.rs @@ -80,14 +80,13 @@ pub fn children_between( ) } -pub fn should_start_with_newline(prev_inline: Option<&ChildTrivia>, tt: &ChildTrivia) -> bool { - count_newlines_before(tt) - + prev_inline - .map(count_newlines_after) - .unwrap_or_default() - - // First for previous item end, second for current item - >= 2 +fn extra_newlines_before_item(prev_inline: Option<&ChildTrivia>, tt: &ChildTrivia) -> u8 { + let newlines_between = + count_newlines_before(tt) + prev_inline.map(count_newlines_after).unwrap_or_default(); + + // A single newline is the default separator between items. + // Any extra newline means preserving one blank line. + u8::try_from(newlines_between.saturating_sub(1)).unwrap_or(u8::MAX) } fn count_newlines_before(tt: &ChildTrivia) -> usize { @@ -148,12 +147,15 @@ pub fn children( mem::take(&mut next) }; let last_child = current_child.replace(Child { - // First item should not start with newline - should_start_with_newline: had_some - && should_start_with_newline( + // First item should not start with blank lines. 
+ extra_newlines_before: if had_some { + extra_newlines_before_item( current_child.as_ref().map(|c| &c.inline_trivia), &before_trivia, - ), + ) + } else { + 0 + }, before_trivia, value, inline_trivia: Vec::new(), @@ -200,7 +202,7 @@ pub fn children( } let ending_comments = EndingComments { - should_start_with_newline: should_start_with_newline( + extra_newlines_before: extra_newlines_before_item( current_child.as_ref().map(|c| &c.inline_trivia), &next, ), @@ -216,8 +218,8 @@ pub fn children( #[derive(Debug)] pub struct Child { - /// If this child has two newlines above in source code, so it needs to have it in the output - pub should_start_with_newline: bool, + /// Number of extra newlines before this child, beyond the default line break. + pub extra_newlines_before: u8, /// Comment before item, i.e /// /// ```ignore @@ -237,13 +239,13 @@ pub struct Child { } pub struct EndingComments { - /// If this child has two newlines above in source code, so it needs to have it in the output - pub should_start_with_newline: bool, + /// Number of extra newlines before ending comments, beyond the default line break. 
+ pub extra_newlines_before: u8, pub trivia: ChildTrivia, } impl EndingComments { pub fn is_empty(&self) -> bool { - !self.should_start_with_newline && self.trivia.is_empty() + self.extra_newlines_before == 0 && self.trivia.is_empty() } pub fn extract_trailing(&mut self) -> ChildTrivia { mem::take(&mut self.trivia) diff --git a/cmds/jrsonnet-fmt/src/comments.rs b/cmds/jrsonnet-fmt/src/comments.rs index dba094ea..4fd0c70d 100644 --- a/cmds/jrsonnet-fmt/src/comments.rs +++ b/cmds/jrsonnet-fmt/src/comments.rs @@ -3,7 +3,11 @@ use std::string::String; use dprint_core::formatting::PrintItems; use jrsonnet_rowan_parser::{nodes::TriviaKind, AstToken}; -use crate::{children::ChildTrivia, p, pi}; +use crate::{ + children::ChildTrivia, + context::{CommentStyle, FormatContext}, + macros::{p, pi}, +}; pub enum CommentLocation { /// Above local, field, other things @@ -15,19 +19,24 @@ pub enum CommentLocation { } #[allow(clippy::too_many_lines, clippy::cognitive_complexity)] -pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut PrintItems) { +pub fn format_comments( + comments: &ChildTrivia, + loc: CommentLocation, + out: &mut PrintItems, + ctx: &FormatContext, +) { for c in comments { let Ok(c) = c else { let mut text = c.as_ref().unwrap_err() as &str; while !text.is_empty() { let pos = text.find(['\n', '\t']).unwrap_or(text.len()); let sliced = &text[..pos]; - p!(out, string(sliced.to_string())); + p!(out, ctx, string(sliced.to_string())); text = &text[pos..]; if !text.is_empty() { match text.as_bytes()[0] { - b'\n' => p!(out, nl), - b'\t' => p!(out, tab), + b'\n' => p!(out, ctx, nl), + b'\t' => p!(out, ctx, tab), _ => unreachable!(), } text = &text[1..]; @@ -70,9 +79,9 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P } if lines.len() == 1 && !doc { if matches!(loc, CommentLocation::ItemInline) { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } - p!(out, str("/* ") string(lines[0].trim().to_string()) str(" */") nl); 
+ p!(out, ctx, str("/* ") string(lines[0].trim().to_string()) str(" */") nl); } else if !lines.is_empty() { fn common_ws_prefix<'a>(a: &'a str, b: &str) -> &'a str { let offset = a @@ -107,36 +116,36 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P .to_string(); } - p!(out, str("/*")); + p!(out, ctx, str("/*")); if doc { - p!(out, str("*")); + p!(out, ctx, str("*")); } - p!(out, nl); + p!(out, ctx, nl); for mut line in lines { if doc { - p!(out, str(" *")); + p!(out, ctx, str(" *")); } if line.is_empty() { - p!(out, nl); + p!(out, ctx, nl); } else { if doc { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } while let Some(new_line) = line.strip_prefix('\t') { if doc { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } else { - p!(out, tab); + p!(out, ctx, tab); } line = new_line.to_string(); } - p!(out, string(line.to_string()) nl); + p!(out, ctx, string(line.to_string()) nl); } } if doc { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } - p!(out, str("*/") nl); + p!(out, ctx, str("*/") nl); } } // TODO: Keep common padding for multiple continous lines of single-line comments @@ -156,26 +165,44 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P // # Line2 // ``` TriviaKind::SingleLineHashComment => { + let text = c + .text() + .strip_prefix('#') + .expect("hash comment starts with #") + .trim(); + let prefix = match ctx.opts.comment_style { + CommentStyle::Slash => "// ", + CommentStyle::Hash | CommentStyle::Leave => "# ", + }; if matches!(loc, CommentLocation::ItemInline) { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } - p!(out, str("# ") string(c.text().strip_prefix('#').expect("hash comment starts with #").trim().to_string())); + p!(out, ctx, str(prefix) string(text.to_string())); if !matches!(loc, CommentLocation::ItemInline) { - p!(out, nl); + p!(out, ctx, nl); } } TriviaKind::SingleLineSlashComment => { + let text = c + .text() + .strip_prefix("//") + .expect("comment starts with //") + 
.trim(); + let prefix = match ctx.opts.comment_style { + CommentStyle::Hash => "# ", + CommentStyle::Slash | CommentStyle::Leave => "// ", + }; if matches!(loc, CommentLocation::ItemInline) { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } - p!(out, str("// ") string(c.text().strip_prefix("//").expect("comment starts with //").trim().to_string())); + p!(out, ctx, str(prefix) string(text.to_string())); if !matches!(loc, CommentLocation::ItemInline) { - p!(out, nl); + p!(out, ctx, nl); } } // Garbage in - garbage out - TriviaKind::ErrorCommentTooShort => p!(out, str("/*/")), - TriviaKind::ErrorCommentUnterminated => p!(out, string(c.text().to_string())), + TriviaKind::ErrorCommentTooShort => p!(out, ctx, str("/*/")), + TriviaKind::ErrorCommentUnterminated => p!(out, ctx, string(c.text().to_string())), } } } diff --git a/cmds/jrsonnet-fmt/src/context.rs b/cmds/jrsonnet-fmt/src/context.rs new file mode 100644 index 00000000..466a2db4 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/context.rs @@ -0,0 +1,149 @@ +//! Formatting context and options. + +use dprint_core::formatting::PrintItems; +use serde::{Deserialize, Serialize}; + +/// Comment style for formatting. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize)] +pub enum CommentStyle { + /// Convert all comments to hash-style (#). + #[serde(rename = "hash")] + Hash, + /// Convert all comments to slash-style (//). + #[serde(rename = "slash")] + Slash, + /// Leave comments as-is. + #[default] + #[serde(rename = "leave")] + Leave, +} + +impl<'de> Deserialize<'de> for CommentStyle { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let value = String::deserialize(deserializer)?; + Ok(match value.to_lowercase().as_str() { + "h" | "hash" => Self::Hash, + "s" | "slash" => Self::Slash, + _ => Self::Leave, + }) + } +} + +/// String literal style for formatting. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize)] +pub enum StringStyle { + /// Convert all strings to double quotes. + #[serde(rename = "double")] + Double, + /// Convert all strings to single quotes. + #[serde(rename = "single")] + Single, + /// Leave strings as-is. + #[default] + #[serde(rename = "leave")] + Leave, +} + +impl<'de> Deserialize<'de> for StringStyle { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let value = String::deserialize(deserializer)?; + Ok(match value.to_lowercase().as_str() { + "d" | "double" => Self::Double, + "s" | "single" => Self::Single, + _ => Self::Leave, + }) + } +} + +/// Formatting options that control output style. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(default)] +pub struct FormatOptions { + /// 0 for hard tabs, otherwise number of spaces. + pub indent: u8, + /// Maximum consecutive blank lines (default: 2). + pub max_blank_lines: u8, + /// Comment style conversion. + pub comment_style: CommentStyle, + /// String literal style conversion. + pub string_style: StringStyle, + /// Add padding inside arrays: [ x, y ] vs [x, y]. + pub pad_arrays: bool, + /// Add padding inside objects: { x: 1 } vs {x: 1}. + pub pad_objects: bool, + /// Use pretty field names (unquoted when possible). + pub pretty_field_names: bool, + /// Remove trailing spaces/tabs at the end of each line. + pub trim_trailing_whitespace: bool, + /// Remove all trailing newline characters from the formatted output. + pub trim_final_newlines: bool, +} + +impl Default for FormatOptions { + fn default() -> Self { + Self { + indent: 0, + max_blank_lines: 2, + comment_style: CommentStyle::default(), + string_style: StringStyle::default(), + pad_arrays: false, + pad_objects: true, + pretty_field_names: true, + trim_trailing_whitespace: false, + trim_final_newlines: false, + } + } +} + +/// Context passed through the formatting process. 
+#[derive(Debug, Clone)] +pub struct FormatContext { + pub opts: FormatOptions, +} + +impl FormatContext { + pub fn new(opts: FormatOptions) -> Self { + Self { opts } + } + + /// Emit extra blank lines (beyond the regular line break) up to configured max. + pub fn emit_blank_lines(&self, extra_newlines: u8, out: &mut PrintItems) { + let newlines_to_emit = extra_newlines.min(self.opts.max_blank_lines); + for _ in 0..newlines_to_emit { + out.push_signal(dprint_core::formatting::Signal::NewLine); + } + } +} + +#[cfg(test)] +mod tests { + use super::FormatOptions; + + #[test] + fn test_format_options_deserialize_in_range_indent() { + let parsed: Result = serde_json::from_value(serde_json::json!({ + "indent": 8 + })); + assert_eq!( + parsed.ok(), + Some(FormatOptions { + indent: 8, + ..FormatOptions::default() + }) + ); + } + + #[test] + fn test_format_options_reject_out_of_range_indent() { + let parsed: Result = serde_json::from_value(serde_json::json!({ + "indent": 300 + })); + assert_eq!(parsed.ok(), None); + } +} diff --git a/cmds/jrsonnet-fmt/src/lib.rs b/cmds/jrsonnet-fmt/src/lib.rs new file mode 100644 index 00000000..eb005903 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/lib.rs @@ -0,0 +1,12 @@ +//! Reusable Jsonnet formatter API used by CLI and LSP integrations. + +mod api; +mod children; +mod comments; +mod context; +mod macros; +mod printable; + +pub use api::{format_code, format_code_range, ByteRangeEdit}; +pub use context::{CommentStyle, FormatContext, FormatOptions, StringStyle}; +pub use printable::Printable; diff --git a/cmds/jrsonnet-fmt/src/macros.rs b/cmds/jrsonnet-fmt/src/macros.rs new file mode 100644 index 00000000..a769f25e --- /dev/null +++ b/cmds/jrsonnet-fmt/src/macros.rs @@ -0,0 +1,128 @@ +//! Formatting macros for building PrintItems. + +/// Create PrintItems with formatting DSL. +/// +/// Usage: `pi!(@i; ctx; str("text") nl {node} ...)` +macro_rules! 
pi { + (@i; $ctx:expr; $($t:tt)*) => {{ + #[allow(unused_mut)] + let mut o = dprint_core::formatting::PrintItems::new(); + let __ctx = $ctx; + pi!(@s; o, __ctx: $($t)*); + o + }}; + (@s; $o:ident, $ctx:ident: str($e:expr $(,)?) $($t:tt)*) => {{ + $o.push_str($e); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: string($e:expr $(,)?) $($t:tt)*) => {{ + $o.push_string($e); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: nl $($t:tt)*) => {{ + $o.push_signal(dprint_core::formatting::Signal::NewLine); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: tab $($t:tt)*) => {{ + $o.push_signal(dprint_core::formatting::Signal::Tab); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: >i $($t:tt)*) => {{ + $o.push_signal(dprint_core::formatting::Signal::StartIndent); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: {{ + $o.push_signal(dprint_core::formatting::Signal::FinishIndent); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: info($v:expr) $($t:tt)*) => {{ + $o.push_info($v); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: if($s:literal, $cond:expr, $($i:tt)*) $($t:tt)*) => {{ + use dprint_core::formatting::PrintItems; + let __ctx = $ctx; + $o.push_condition(dprint_core::formatting::conditions::if_true( + $s, + $cond.clone(), + { + let mut o = PrintItems::new(); + p!(o, __ctx, $($i)*); + o + }, + )); + pi!(@s; $o, __ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: if_else($s:literal, $cond:expr, $($i:tt)*)($($e:tt)+) $($t:tt)*) => {{ + use dprint_core::formatting::PrintItems; + let __ctx = $ctx; + $o.push_condition(dprint_core::formatting::conditions::if_true_or( + $s, + $cond.clone(), + { + let mut o = PrintItems::new(); + p!(o, __ctx, $($i)*); + o + }, + { + let mut o = PrintItems::new(); + p!(o, __ctx, $($e)*); + o + }, + )); + pi!(@s; $o, __ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: if_not($s:literal, $cond:expr, $($e:tt)*) $($t:tt)*) => {{ + use 
dprint_core::formatting::PrintItems; + let __ctx = $ctx; + $o.push_condition(dprint_core::formatting::conditions::if_true_or( + $s, + $cond.clone(), + { + let o = PrintItems::new(); + o + }, + { + let mut o = PrintItems::new(); + p!(o, __ctx, $($e)*); + o + }, + )); + pi!(@s; $o, __ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: {$expr:expr} $($t:tt)*) => {{ + $expr.print($o, $ctx); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: items($expr:expr) $($t:tt)*) => {{ + $o.extend($expr); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: if ($e:expr)($($then:tt)*) $($t:tt)*) => {{ + if $e { + pi!(@s; $o, $ctx: $($then)*); + } + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: ifelse ($e:expr)($($then:tt)*)($($else:tt)*) $($t:tt)*) => {{ + if $e { + pi!(@s; $o, $ctx: $($then)*); + } else { + pi!(@s; $o, $ctx: $($else)*); + } + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $i:ident, $ctx:ident:) => {} +} + +/// Push to existing PrintItems with formatting DSL. +/// +/// Usage: `p!(out, ctx, str("text") nl {node} ...)` +macro_rules! p { + ($o:ident, $ctx:ident, $($t:tt)*) => { + pi!(@s; $o, $ctx: $($t)*) + }; +} + +pub(crate) use p; +pub(crate) use pi; diff --git a/cmds/jrsonnet-fmt/src/main.rs b/cmds/jrsonnet-fmt/src/main.rs index f6099f70..7ea13496 100644 --- a/cmds/jrsonnet-fmt/src/main.rs +++ b/cmds/jrsonnet-fmt/src/main.rs @@ -1,710 +1,121 @@ +//! jrsonnet-fmt: Jsonnet code formatter +//! +//! A formatter for Jsonnet code using dprint-core as the formatting engine. 
+ use std::{ - any::type_name, fs, io::{self, Write}, path::PathBuf, process, - rc::Rc, }; -use children::{children_between, trivia_before}; use clap::Parser; -use dprint_core::formatting::{ - condition_helpers::is_multiple_lines, condition_resolvers::true_resolver, - ConditionResolverContext, LineNumber, PrintItems, PrintOptions, -}; +use dprint_core::formatting::{PrintItems, PrintOptions}; use hi_doc::Formatting; -use jrsonnet_rowan_parser::{ - nodes::{ - Arg, ArgsDesc, Assertion, BinaryOperator, Bind, CompSpec, Destruct, DestructArrayPart, - DestructRest, Expr, ExprBase, FieldName, ForSpec, IfSpec, ImportKind, Literal, Member, - Name, Number, ObjBody, ObjLocal, ParamsDesc, SliceDesc, SourceFile, Stmt, Suffix, Text, - UnaryOperator, Visibility, - }, - AstNode, AstToken as _, SyntaxToken, -}; - -use crate::{ - children::trivia_after, - comments::{format_comments, CommentLocation}, -}; mod children; mod comments; +mod context; +mod macros; +mod printable; #[cfg(test)] mod tests; -pub trait Printable { - fn print(&self, out: &mut PrintItems); -} - -macro_rules! pi { - (@i; $($t:tt)*) => {{ - #[allow(unused_mut)] - let mut o = dprint_core::formatting::PrintItems::new(); - pi!(@s; o: $($t)*); - o - }}; - (@s; $o:ident: str($e:expr $(,)?) $($t:tt)*) => {{ - $o.push_str($e); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: string($e:expr $(,)?) 
$($t:tt)*) => {{ - $o.push_string($e); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: nl $($t:tt)*) => {{ - $o.push_signal(dprint_core::formatting::Signal::NewLine); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: tab $($t:tt)*) => {{ - $o.push_signal(dprint_core::formatting::Signal::Tab); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: >i $($t:tt)*) => {{ - $o.push_signal(dprint_core::formatting::Signal::StartIndent); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: {{ - $o.push_signal(dprint_core::formatting::Signal::FinishIndent); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: info($v:expr) $($t:tt)*) => {{ - $o.push_info($v); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: if($s:literal, $cond:expr, $($i:tt)*) $($t:tt)*) => {{ - $o.push_condition(dprint_core::formatting::conditions::if_true( - $s, - $cond.clone(), - { - let mut o = PrintItems::new(); - p!(o, $($i)*); - o - }, - )); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: if_else($s:literal, $cond:expr, $($i:tt)*)($($e:tt)+) $($t:tt)*) => {{ - $o.push_condition(dprint_core::formatting::conditions::if_true_or( - $s, - $cond.clone(), - { - let mut o = PrintItems::new(); - p!(o, $($i)*); - o - }, - { - let mut o = PrintItems::new(); - p!(o, $($e)*); - o - }, - )); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: if_not($s:literal, $cond:expr, $($e:tt)*) $($t:tt)*) => {{ - $o.push_condition(dprint_core::formatting::conditions::if_true_or( - $s, - $cond.clone(), - { - let o = PrintItems::new(); - o - }, - { - let mut o = PrintItems::new(); - p!(o, $($e)*); - o - }, - )); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: {$expr:expr} $($t:tt)*) => {{ - $expr.print($o); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: items($expr:expr) $($t:tt)*) => {{ - $o.extend($expr); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: if ($e:expr)($($then:tt)*) $($t:tt)*) => {{ - if $e { - pi!(@s; $o: $($then)*); - } - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: ifelse ($e:expr)($($then:tt)*)($($else:tt)*) $($t:tt)*) => {{ - if $e { - pi!(@s; $o: 
$($then)*); - } else { - pi!(@s; $o: $($else)*); - } - pi!(@s; $o: $($t)*); - }}; - (@s; $i:ident:) => {} -} -macro_rules! p { - ($o:ident, $($t:tt)*) => { - pi!(@s; $o: $($t)*) - }; -} -pub(crate) use p; -pub(crate) use pi; - -impl

Printable for Option

-where - P: Printable, -{ - fn print(&self, out: &mut PrintItems) { - if let Some(v) = self { - v.print(out); - } else { - p!( - out, - string(format!( - "/*missing {}*/", - type_name::

().replace("jrsonnet_rowan_parser::generated::nodes::", "") - ),) - ); - } - } -} - -impl Printable for SyntaxToken { - fn print(&self, out: &mut PrintItems) { - p!(out, string(self.to_string())); - } -} - -impl Printable for Text { - fn print(&self, out: &mut PrintItems) { - p!(out, string(format!("{}", self))); - } -} -impl Printable for Number { - fn print(&self, out: &mut PrintItems) { - p!(out, string(format!("{}", self))); - } -} - -impl Printable for Name { - fn print(&self, out: &mut PrintItems) { - p!(out, { self.ident_lit() }); - } -} - -impl Printable for DestructRest { - fn print(&self, out: &mut PrintItems) { - p!(out, str("...")); - if let Some(name) = self.into() { - p!(out, { name }); - } - } -} - -impl Printable for Destruct { - fn print(&self, out: &mut PrintItems) { - match self { - Self::DestructFull(f) => { - p!(out, { f.name() }); - } - Self::DestructSkip(_) => p!(out, str("?")), - Self::DestructArray(a) => { - p!(out, str("[") >i nl); - for el in a.destruct_array_parts() { - match el { - DestructArrayPart::DestructArrayElement(e) => { - p!(out, {e.destruct()} str(",") nl); - } - DestructArrayPart::DestructRest(d) => { - p!(out, {d} str(",") nl); - } - } - } - p!(out, { - p!(out, str("{") >i nl); - for item in o.destruct_object_fields() { - p!(out, { item.field() }); - if let Some(des) = item.destruct() { - p!(out, str(": ") {des}); - } - if let Some(def) = item.expr() { - p!(out, str(" = ") {def}); - } - p!(out, str(",") nl); - } - if let Some(rest) = o.destruct_rest() { - p!(out, {rest} nl); - } - p!(out, { - if let Some(id) = f.id() { - p!(out, { id }); - } else if let Some(str) = f.text() { - p!(out, { str }); - } else { - p!(out, str("/*missing FieldName*/")); - } - } - Self::FieldNameDynamic(d) => { - p!(out, str("[") {d.expr()} str("]")); - } - } +fn parse_string_style(s: &str) -> Result { + match s.to_lowercase().as_str() { + "d" | "double" => Ok(StringStyle::Double), + "s" | "single" => Ok(StringStyle::Single), + "l" | "leave" => 
Ok(StringStyle::Leave), + _ => Err(format!( + "Invalid string style '{}'. Use: d/double, s/single, l/leave", + s + )), } } -impl Printable for Visibility { - fn print(&self, out: &mut PrintItems) { - p!(out, string(self.to_string())); +fn parse_comment_style(s: &str) -> Result { + match s.to_lowercase().as_str() { + "h" | "hash" => Ok(CommentStyle::Hash), + "s" | "slash" => Ok(CommentStyle::Slash), + "l" | "leave" => Ok(CommentStyle::Leave), + _ => Err(format!( + "Invalid comment style '{}'. Use: h/hash, s/slash, l/leave", + s + )), } } -impl Printable for ObjLocal { - fn print(&self, out: &mut PrintItems) { - p!(out, str("local ") {self.bind()}); - } -} +#[derive(Parser)] +#[allow(clippy::struct_excessive_bools)] +struct Opts { + /// Treat input as code, reformat it instead of reading file. + #[clap(long, short = 'e')] + exec: bool, + /// Path to be reformatted if `--exec` if unset, otherwise code itself. + input: String, + /// Replace code with formatted in-place, instead of printing it to stdout. + /// Only applicable if `--exec` is unset. + #[clap(long, short = 'i')] + in_place: bool, -impl Printable for Assertion { - fn print(&self, out: &mut PrintItems) { - p!(out, str("assert ") {self.condition()}); - if self.colon_token().is_some() || self.message().is_some() { - p!(out, str(": ") {self.message()}); - } - } -} + /// Exit with error if formatted does not match input + #[arg(long)] + test: bool, + /// Number of spaces to indent with + /// + /// 0 for guess from input (default), and use hard tabs if unable to guess. 
+ #[arg(long, default_value = "0")] + indent: u8, + /// Force hard tab for indentation + #[arg(long)] + hard_tabs: bool, -impl Printable for ParamsDesc { - fn print(&self, out: &mut PrintItems) { - p!(out, str("(") >i nl); - for param in self.params() { - p!(out, { param.destruct() }); - if param.assign_token().is_some() || param.expr().is_some() { - p!(out, str(" = ") {param.expr()}); - } - p!(out, str(",") nl); - } - p!(out, i nl)); - let (children, end_comments) = children_between::( - self.syntax().clone(), - self.l_paren_token().map(Into::into).as_ref(), - self.r_paren_token().map(Into::into).as_ref(), - None, - ); - let mut args = children.into_iter().peekable(); - while let Some(ele) = args.next() { - if ele.should_start_with_newline { - p!(out, nl); - } - format_comments(&ele.before_trivia, CommentLocation::AboveItem, out); - let arg = ele.value; - if arg.name().is_some() || arg.assign_token().is_some() { - p!(out, {arg.name()} str(" = ")); - } - let comma_between = if args.peek().is_some() { - true_resolver() - } else { - multi_line.clone() - }; - p!(out, {arg.expr()} if("arg comma", comma_between, str(",") if_not("between args", multi_line, str(" ")))); - format_comments(&ele.inline_trivia, CommentLocation::ItemInline, out); - p!(out, if("between args", multi_line, nl)); - } - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); - p!(out, if("end args", multi_line, { - p!(out, { b.obj_local() }); - } - Self::MemberAssertStmt(ass) => { - p!(out, { ass.assertion() }); - } - Self::MemberFieldNormal(n) => { - p!(out, {n.field_name()} if(n.plus_token().is_some())({n.plus_token()}) {n.visibility()} str(" ") {n.expr()}); - } - Self::MemberFieldMethod(m) => { - p!(out, {m.field_name()} {m.params_desc()} {m.visibility()} str(" ") {m.expr()}); - } - } - } -} + /// String quote style: d/double, s/single, l/leave (default: l) + #[arg(long, value_parser = parse_string_style, 
default_value = "l")] + string_style: StringStyle, -impl Printable for ObjBody { - fn print(&self, out: &mut PrintItems) { - match self { - Self::ObjBodyComp(l) => { - let (children, mut end_comments) = children_between::( - l.syntax().clone(), - l.l_brace_token().map(Into::into).as_ref(), - Some( - &(l.comp_specs() - .next() - .expect("at least one spec is defined") - .syntax() - .clone()) - .into(), - ), - None, - ); - let trailing_for_comp = end_comments.extract_trailing(); - p!(out, str("{") >i nl); - for mem in children { - if mem.should_start_with_newline { - p!(out, nl); - } - format_comments(&mem.before_trivia, CommentLocation::AboveItem, out); - p!(out, {mem.value} str(",")); - format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl); - } + /// Comment style: h/hash, s/slash, l/leave (default: l) + #[arg(long, value_parser = parse_comment_style, default_value = "l")] + comment_style: CommentStyle, - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); + /// Add padding inside arrays: [ x, y ] instead of [x, y] + #[arg(long)] + pad_arrays: bool, - let (compspecs, end_comments) = children_between::( - l.syntax().clone(), - l.member_comps() - .last() - .map(|m| m.syntax().clone()) - .map(Into::into) - .or_else(|| l.l_brace_token().map(Into::into)) - .as_ref(), - l.r_brace_token().map(Into::into).as_ref(), - Some(trailing_for_comp), - ); - for mem in compspecs { - if mem.should_start_with_newline { - p!(out, nl); - } - format_comments(&mem.before_trivia, CommentLocation::AboveItem, out); - p!(out, { mem.value }); - format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out); - } - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); + /// Remove padding inside objects: {x: 1} instead of { x: 1 } + #[arg(long)] + no_pad_objects: bool, - p!(out, nl { - let 
(children, end_comments) = children_between::( - l.syntax().clone(), - l.l_brace_token().map(Into::into).as_ref(), - l.r_brace_token().map(Into::into).as_ref(), - None, - ); - if children.is_empty() && end_comments.is_empty() { - p!(out, str("{ }")); - return; - } - p!(out, str("{") >i nl); - for (i, mem) in children.into_iter().enumerate() { - if mem.should_start_with_newline && i != 0 { - p!(out, nl); - } - format_comments(&mem.before_trivia, CommentLocation::AboveItem, out); - p!(out, {mem.value} str(",")); - format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl); - } + /// Don't use pretty (unquoted) field names + #[arg(long)] + no_pretty_field_names: bool, - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); - p!(out, { - p!(out, {d.into()} str(" = ") {d.value()}); - } - Self::BindFunction(f) => { - p!(out, {f.name()} {f.params()} str(" = ") {f.value()}); - } - } - } -} -impl Printable for Literal { - fn print(&self, out: &mut PrintItems) { - p!(out, string(self.syntax().to_string())); - } -} -impl Printable for ImportKind { - fn print(&self, out: &mut PrintItems) { - p!(out, string(self.syntax().to_string())); - } -} -impl Printable for ForSpec { - fn print(&self, out: &mut PrintItems) { - p!(out, str("for ") {self.bind()} str(" in ") {self.expr()}); - } -} -impl Printable for IfSpec { - fn print(&self, out: &mut PrintItems) { - p!(out, str("if ") {self.expr()}); - } -} -impl Printable for CompSpec { - fn print(&self, out: &mut PrintItems) { - match self { - Self::ForSpec(f) => f.print(out), - Self::IfSpec(i) => i.print(out), - } - } -} -impl Printable for Expr { - fn print(&self, out: &mut PrintItems) { - let (stmts, _ending) = children_between::( - self.syntax().clone(), - None, - self.expr_base() - .as_ref() - .map(ExprBase::syntax) - .cloned() - .map(Into::into) - .as_ref(), - None, - ); - for stmt in stmts { - p!(out, { stmt.value }); - } 
- p!(out, { self.expr_base() }); - let (suffixes, _ending) = children_between::( - self.syntax().clone(), - self.expr_base() - .as_ref() - .map(ExprBase::syntax) - .cloned() - .map(Into::into) - .as_ref(), - None, - None, - ); - for suffix in suffixes { - p!(out, { suffix.value }); - } - } -} -impl Printable for Suffix { - fn print(&self, out: &mut PrintItems) { - match self { - Self::SuffixIndex(i) => { - if i.question_mark_token().is_some() { - p!(out, str("?")); - } - p!(out, str(".") {i.index()}); - } - Self::SuffixIndexExpr(e) => { - if e.question_mark_token().is_some() { - p!(out, str(".?")); - } - p!(out, str("[") {e.index()} str("]")); - } - Self::SuffixSlice(d) => { - p!(out, { d.slice_desc() }); - } - Self::SuffixApply(a) => { - p!(out, { a.args_desc() }); - } - } - } -} -impl Printable for Stmt { - fn print(&self, out: &mut PrintItems) { - match self { - Self::StmtLocal(l) => { - let (binds, end_comments) = children_between::( - l.syntax().clone(), - l.local_kw_token().map(Into::into).as_ref(), - l.semi_token().map(Into::into).as_ref(), - None, - ); - if binds.len() == 1 { - let bind = &binds[0]; - format_comments(&bind.before_trivia, CommentLocation::AboveItem, out); - p!(out, str("local ") {bind.value}); - // TODO: keep end_comments, child.inline_trivia somehow, force multiple locals formatting in case of presence? 
- } else { - p!(out,str("local") >i nl); - for bind in binds { - if bind.should_start_with_newline { - p!(out, nl); - } - format_comments(&bind.before_trivia, CommentLocation::AboveItem, out); - p!(out, {bind.value} str(",")); - format_comments(&bind.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl); - } - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); - p!(out, { - p!(out, {a.assertion()} str(";") nl); - } - } - } -} -impl Printable for ExprBase { - fn print(&self, out: &mut PrintItems) { - match self { - Self::ExprBinary(b) => { - p!(out, {b.lhs_work()} str(" ") {b.binary_operator()} str(" ") {b.rhs_work()}); - } - Self::ExprUnary(u) => p!(out, {u.unary_operator()} {u.rhs()}), - // Self::ExprSlice(s) => { - // p!(new: {s.expr()} {s.slice_desc()}) - // } - // Self::ExprIndex(i) => { - // p!(new: {i.expr()} str(".") {i.index()}) - // } - // Self::ExprIndexExpr(i) => p!(new: {i.base()} str("[") {i.index()} str("]")), - // Self::ExprApply(a) => { - // let mut pi = p!(new: {a.expr()} {a.args_desc()}); - // if a.tailstrict_kw_token().is_some() { - // p!(out,str(" tailstrict")); - // } - // pi - // } - Self::ExprObjExtend(ex) => { - p!(out, {ex.lhs_work()} str(" ") {ex.rhs_work()}); - } - Self::ExprParened(p) => { - p!(out, str("(") {p.expr()} str(")")); - } - Self::ExprString(s) => p!(out, { s.text() }), - Self::ExprNumber(n) => p!(out, { n.number() }), - Self::ExprArray(a) => { - p!(out, str("[") >i nl); - for el in a.exprs() { - p!(out, {el} str(",") nl); - } - p!(out, { - p!(out, { obj.obj_body() }); - } - Self::ExprArrayComp(arr) => { - p!(out, str("[") {arr.expr()}); - for spec in arr.comp_specs() { - p!(out, str(" ") {spec}); - } - p!(out, str("]")); - } - Self::ExprImport(v) => { - p!(out, {v.import_kind()} str(" ") {v.text()}); - } - Self::ExprVar(n) => p!(out, { n.name() }), - // Self::ExprLocal(l) => { - // } - Self::ExprIfThenElse(ite) => { - p!(out, 
str("if ") {ite.cond()} str(" then ") {ite.then().map(|t| t.expr())}); - if ite.else_kw_token().is_some() || ite.else_().is_some() { - p!(out, str(" else ") {ite.else_().map(|t| t.expr())}); - } - } - Self::ExprFunction(f) => p!(out, str("function") {f.params_desc()} nl {f.expr()}), - // Self::ExprAssert(a) => p!(new: {a.assertion()} str("; ") {a.expr()}), - Self::ExprError(e) => p!(out, str("error ") {e.expr()}), - Self::ExprLiteral(l) => { - p!(out, { l.literal() }); - } - } - } + /// Debug option: how many times to call reformatting in case of unstable dprint output resolution. + /// + /// 0 for not retrying to reformat. + #[arg(long, default_value = "0")] + conv_limit: usize, } -impl Printable for SourceFile { - fn print(&self, out: &mut PrintItems) { - let before = trivia_before( - self.syntax().clone(), - self.expr() - .map(|e| e.syntax().clone()) - .map(Into::into) - .as_ref(), - ); - let after = trivia_after( - self.syntax().clone(), - self.expr() - .map(|e| e.syntax().clone()) - .map(Into::into) - .as_ref(), - ); - format_comments(&before, CommentLocation::AboveItem, out); - p!(out, {self.expr()} nl); - format_comments(&after, CommentLocation::EndOfItems, out); - } +#[derive(thiserror::Error, Debug)] +enum Error { + #[error("--in-place is incompatible with --exec")] + InPlaceExec, + #[error("io: {0}")] + Io(#[from] io::Error), + #[error("persist: {0}")] + Persist(#[from] tempfile::PersistError), + #[error("parsing failed, refusing to reformat corrupted input")] + Parse, } -struct FormatOptions { - // 0 for hard tabs - indent: u8, -} fn format(input: &str, opts: &FormatOptions) -> Option { let (parsed, errors) = jrsonnet_rowan_parser::parse(input); if !errors.is_empty() { @@ -731,10 +142,13 @@ fn format(input: &str, opts: &FormatOptions) -> Option { // TODO: Verify how formatter interacts in cases of missing positional values, i.e `if cond then /*missing Expr*/ else residual`. 
return None; } + + let ctx = FormatContext::new(opts.clone()); + Some(dprint_core::formatting::format( || { let mut out = PrintItems::new(); - parsed.print(&mut out); + parsed.print(&mut out, &ctx); out }, PrintOptions { @@ -751,53 +165,7 @@ fn format(input: &str, opts: &FormatOptions) -> Option { )) } -#[derive(Parser)] -#[allow(clippy::struct_excessive_bools)] -struct Opts { - /// Treat input as code, reformat it instead of reading file. - #[clap(long, short = 'e')] - exec: bool, - /// Path to be reformatted if `--exec` if unset, otherwise code itself. - input: String, - /// Replace code with formatted in-place, instead of printing it to stdout. - /// Only applicable if `--exec` is unset. - #[clap(long, short = 'i')] - in_place: bool, - - /// Exit with error if formatted does not match input - #[arg(long)] - test: bool, - /// Number of spaces to indent with - /// - /// 0 for guess from input (default), and use hard tabs if unable to guess. - #[arg(long, default_value = "0")] - indent: u8, - /// Force hard tab for indentation - #[arg(long)] - hard_tabs: bool, - - /// Debug option: how many times to call reformatting in case of unstable dprint output resolution. - /// - /// 0 for not retrying to reformat. 
- #[arg(long, default_value = "0")] - conv_limit: usize, -} - -#[derive(thiserror::Error, Debug)] -enum Error { - #[error("--in-place is incompatible with --exec")] - InPlaceExec, - #[error("io: {0}")] - Io(#[from] io::Error), - #[error("persist: {0}")] - Persist(#[from] tempfile::PersistError), - #[error("parsing failed, refusing to reformat corrupted input")] - Parse, -} - fn main_result() -> Result<(), Error> { - eprintln!("jrsonnet-fmt is a prototype of a jsonnet code formatter, do not expect it to produce meaningful results right now."); - eprintln!("It is not expected for its output to match other implementations, it will be completly separate implementation with maybe different name."); let mut opts = Opts::parse(); let input = if opts.exec { if opts.in_place { @@ -814,21 +182,28 @@ fn main_result() -> Result<(), Error> { opts.hard_tabs = true; } + let format_opts = FormatOptions { + indent: if opts.indent == 0 || opts.hard_tabs { + 0 + } else { + opts.indent + }, + max_blank_lines: opts.max_blank_lines, + comment_style: opts.comment_style, + string_style: opts.string_style, + pad_arrays: opts.pad_arrays, + pad_objects: !opts.no_pad_objects, + pretty_field_names: !opts.no_pretty_field_names, + trim_trailing_whitespace: false, + trim_final_newlines: false, + }; + let mut iteration = 0; let mut formatted = input.clone(); let mut convergence_tmp; // https://github.com/dprint/dprint/pull/423 loop { - let Some(reformatted) = format( - &formatted, - &FormatOptions { - indent: if opts.indent == 0 || opts.hard_tabs { - 0 - } else { - opts.indent - }, - }, - ) else { + let Some(reformatted) = format(&formatted, &format_opts) else { return Err(Error::Parse); }; convergence_tmp = reformatted.trim().to_owned(); diff --git a/cmds/jrsonnet-fmt/src/printable.rs b/cmds/jrsonnet-fmt/src/printable.rs new file mode 100644 index 00000000..fa003c6a --- /dev/null +++ b/cmds/jrsonnet-fmt/src/printable.rs @@ -0,0 +1,614 @@ +//! Printable trait and implementations for AST nodes. 
+ +use std::{any::type_name, rc::Rc}; + +use dprint_core::formatting::{ + condition_helpers::is_multiple_lines, condition_resolvers::true_resolver, + ConditionResolverContext, LineNumber, PrintItems, +}; +use jrsonnet_rowan_parser::{ + nodes::{ + Arg, ArgsDesc, Assertion, BinaryOperator, Bind, CompSpec, Destruct, DestructArrayPart, + DestructRest, Expr, ExprBase, FieldName, ForSpec, IfSpec, ImportKind, Literal, Member, + Name, Number, ObjBody, ObjLocal, ParamsDesc, SliceDesc, SourceFile, Stmt, Text, TextKind, + UnaryOperator, Visibility, + }, + AstNode, AstToken as _, SyntaxToken, +}; + +use crate::{ + children::{children_between, trivia_after, trivia_before}, + comments::{format_comments, CommentLocation}, + context::{FormatContext, StringStyle}, + macros::{p, pi}, +}; + +/// Trait for AST nodes that can be formatted. +pub trait Printable { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext); +} + +impl

Printable for Option

+where + P: Printable, +{ + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + if let Some(v) = self { + v.print(out, ctx); + } else { + p!( + out, + ctx, + string(format!( + "/*missing {}*/", + type_name::

().replace("jrsonnet_rowan_parser::generated::nodes::", "") + ),) + ); + } + } +} + +impl Printable for SyntaxToken { + fn print(&self, out: &mut PrintItems, _ctx: &FormatContext) { + p!(out, _ctx, string(self.to_string())); + } +} + +impl Printable for Text { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + let raw = format!("{}", self); + + // Only convert simple single/double quoted strings + let converted = match (self.kind(), ctx.opts.string_style) { + // Leave as-is + (_, StringStyle::Leave) => raw, + // Already in target style + (TextKind::StringDouble, StringStyle::Double) + | (TextKind::StringSingle, StringStyle::Single) => raw, + // Convert single to double + (TextKind::StringSingle, StringStyle::Double) => convert_string_quotes(&raw, '\'', '"'), + // Convert double to single + (TextKind::StringDouble, StringStyle::Single) => convert_string_quotes(&raw, '"', '\''), + // Don't convert verbatim, block, or error strings + _ => raw, + }; + + p!(out, ctx, string(converted)); + } +} + +/// Convert a string from one quote style to another. +/// Handles escape sequences properly. 
+fn convert_string_quotes(s: &str, from_quote: char, to_quote: char) -> String { + // Strip the outer quotes + let inner = &s[1..s.len() - 1]; + + let mut result = String::with_capacity(s.len() + 2); + result.push(to_quote); + + let mut chars = inner.chars().peekable(); + while let Some(c) = chars.next() { + if c == '\\' { + if let Some(&next) = chars.peek() { + if next == from_quote { + // \' or \" - unescape since we're changing quote style + chars.next(); + result.push(from_quote); + } else { + // Keep other escapes as-is + result.push('\\'); + } + } else { + result.push('\\'); + } + } else if c == to_quote { + // Escape the new quote character + result.push('\\'); + result.push(c); + } else { + result.push(c); + } + } + + result.push(to_quote); + result +} +impl Printable for Number { + fn print(&self, out: &mut PrintItems, _ctx: &FormatContext) { + p!(out, _ctx, string(format!("{}", self))); + } +} + +impl Printable for Name { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, { self.ident_lit() }); + } +} + +impl Printable for DestructRest { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("...")); + if let Some(name) = self.into() { + p!(out, ctx, { name }); + } + } +} + +impl Printable for Destruct { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::DestructFull(f) => { + p!(out, ctx, { f.name() }); + } + Self::DestructSkip(_) => p!(out, ctx, str("?")), + Self::DestructArray(a) => { + p!(out, ctx, str("[") >i nl); + for el in a.destruct_array_parts() { + match el { + DestructArrayPart::DestructArrayElement(e) => { + p!(out, ctx, {e.destruct()} str(",") nl); + } + DestructArrayPart::DestructRest(d) => { + p!(out, ctx, {d} str(",") nl); + } + } + } + p!(out, ctx, { + p!(out, ctx, str("{") >i nl); + for item in o.destruct_object_fields() { + p!(out, ctx, { item.field() }); + if let Some(des) = item.destruct() { + p!(out, ctx, str(": ") {des}); + } + if let Some(def) 
= item.expr() { + p!(out, ctx, str(" = ") {def}); + } + p!(out, ctx, str(",") nl); + } + if let Some(rest) = o.destruct_rest() { + p!(out, ctx, {rest} nl); + } + p!(out, ctx, { + if let Some(id) = f.id() { + p!(out, ctx, { id }); + } else if let Some(str) = f.text() { + p!(out, ctx, { str }); + } else { + p!(out, ctx, str("/*missing FieldName*/")); + } + } + Self::FieldNameDynamic(d) => { + p!(out, ctx, str("[") {d.expr()} str("]")); + } + } + } +} + +impl Printable for Visibility { + fn print(&self, out: &mut PrintItems, _ctx: &FormatContext) { + p!(out, _ctx, string(self.to_string())); + } +} + +impl Printable for ObjLocal { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("local ") {self.bind()}); + } +} + +impl Printable for Assertion { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("assert ") {self.condition()}); + if self.colon_token().is_some() || self.message().is_some() { + p!(out, ctx, str(": ") {self.message()}); + } + } +} + +impl Printable for ParamsDesc { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("(") >i nl); + for param in self.params() { + p!(out, ctx, { param.destruct() }); + if param.assign_token().is_some() || param.expr().is_some() { + p!(out, ctx, str(" = ") {param.expr()}); + } + p!(out, ctx, str(",") nl); + } + p!(out, ctx, i nl)); + let (children, end_comments) = children_between::( + self.syntax().clone(), + self.l_paren_token().map(Into::into).as_ref(), + self.r_paren_token().map(Into::into).as_ref(), + None, + ); + let mut args = children.into_iter().peekable(); + while let Some(ele) = args.next() { + ctx.emit_blank_lines(ele.extra_newlines_before, out); + format_comments(&ele.before_trivia, CommentLocation::AboveItem, out, ctx); + let arg = ele.value; + if arg.name().is_some() || arg.assign_token().is_some() { + p!(out, ctx, {arg.name()} str(" = ")); + } + let comma_between = if args.peek().is_some() { + true_resolver() + } else { 
+ multi_line.clone() + }; + p!(out, ctx, {arg.expr()} if("arg comma", comma_between, str(",") if_not("between args", multi_line, str(" ")))); + format_comments(&ele.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, if("between args", multi_line, nl)); + } + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + p!(out, ctx, if("end args", multi_line, { + p!(out, ctx, { b.obj_local() }); + } + Self::MemberAssertStmt(ass) => { + p!(out, ctx, { ass.assertion() }); + } + Self::MemberFieldNormal(n) => { + p!(out, ctx, {n.field_name()} if(n.plus_token().is_some())({n.plus_token()}) {n.visibility()} str(" ") {n.expr()}); + } + Self::MemberFieldMethod(m) => { + p!(out, ctx, {m.field_name()} {m.params_desc()} {m.visibility()} str(" ") {m.expr()}); + } + } + } +} + +impl Printable for ObjBody { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::ObjBodyComp(l) => { + let (children, mut end_comments) = children_between::( + l.syntax().clone(), + l.l_brace_token().map(Into::into).as_ref(), + Some( + &(l.comp_specs() + .next() + .expect("at least one spec is defined") + .syntax() + .clone()) + .into(), + ), + None, + ); + let trailing_for_comp = end_comments.extract_trailing(); + p!(out, ctx, str("{") >i nl); + for mem in children { + ctx.emit_blank_lines(mem.extra_newlines_before, out); + format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {mem.value} str(",")); + format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, nl); + } + + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + + let (compspecs, end_comments) = children_between::( + l.syntax().clone(), + l.member_comps() + .last() + .map(|m| m.syntax().clone()) + .map(Into::into) + .or_else(|| 
l.l_brace_token().map(Into::into)) + .as_ref(), + l.r_brace_token().map(Into::into).as_ref(), + Some(trailing_for_comp), + ); + for mem in compspecs { + ctx.emit_blank_lines(mem.extra_newlines_before, out); + format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, { mem.value }); + format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); + } + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + + p!(out, ctx, nl { + let (children, end_comments) = children_between::( + l.syntax().clone(), + l.l_brace_token().map(Into::into).as_ref(), + l.r_brace_token().map(Into::into).as_ref(), + None, + ); + if children.is_empty() && end_comments.is_empty() { + if ctx.opts.pad_objects { + p!(out, ctx, str("{ }")); + } else { + p!(out, ctx, str("{}")); + } + return; + } + p!(out, ctx, str("{") >i nl); + for (i, mem) in children.into_iter().enumerate() { + // Don't emit blank line before first item. 
+ ctx.emit_blank_lines(if i == 0 { 0 } else { mem.extra_newlines_before }, out); + format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {mem.value} str(",")); + format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, nl); + } + + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + p!(out, ctx, { + p!(out, ctx, {d.into()} str(" = ") {d.value()}); + } + Self::BindFunction(f) => { + p!(out, ctx, {f.name()} {f.params()} str(" = ") {f.value()}); + } + } + } +} +impl Printable for Literal { + fn print(&self, out: &mut PrintItems, _ctx: &FormatContext) { + p!(out, _ctx, string(self.syntax().to_string())); + } +} +impl Printable for ImportKind { + fn print(&self, out: &mut PrintItems, _ctx: &FormatContext) { + p!(out, _ctx, string(self.syntax().to_string())); + } +} +impl Printable for ForSpec { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("for ") {self.bind()} str(" in ") {self.expr()}); + } +} +impl Printable for IfSpec { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("if ") {self.expr()}); + } +} +impl Printable for CompSpec { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::ForSpec(f) => f.print(out, ctx), + Self::IfSpec(i) => i.print(out, ctx), + } + } +} +impl Printable for Expr { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + let (stmts, _ending) = children_between::( + self.syntax().clone(), + None, + self.expr_base() + .as_ref() + .map(ExprBase::syntax) + .cloned() + .map(Into::into) + .as_ref(), + None, + ); + for stmt in stmts { + p!(out, ctx, { stmt.value }); + } + p!(out, ctx, { self.expr_base() }); + } +} +impl Printable for Stmt { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::StmtLocal(l) => { + let (binds, end_comments) = 
children_between::( + l.syntax().clone(), + l.local_kw_token().map(Into::into).as_ref(), + l.semi_token().map(Into::into).as_ref(), + None, + ); + if binds.len() == 1 { + let bind = &binds[0]; + format_comments(&bind.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, str("local ") {bind.value}); + // TODO: keep end_comments, child.inline_trivia somehow, force multiple locals formatting in case of presence? + } else { + p!(out, ctx, str("local") >i nl); + for bind in binds { + ctx.emit_blank_lines(bind.extra_newlines_before, out); + format_comments(&bind.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {bind.value} str(",")); + format_comments(&bind.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, nl); + } + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + p!(out, ctx, { + p!(out, ctx, {a.assertion()} str(";") nl); + } + } + } +} +impl Printable for ExprBase { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::ExprBinary(b) => { + p!(out, ctx, {b.lhs_work()} str(" ") {b.binary_operator()} str(" ") {b.rhs_work()}); + } + Self::ExprUnary(u) => p!(out, ctx, {u.unary_operator()} {u.rhs()}), + Self::ExprField(f) => { + p!(out, ctx, { f.base() }); + if f.question_mark_token().is_some() { + p!(out, ctx, str("?")); + } + p!(out, ctx, str(".") {f.field()}); + } + Self::ExprIndex(i) => { + p!(out, ctx, { i.base() }); + if i.question_mark_token().is_some() { + p!(out, ctx, str("?")); + } + if i.dot_token().is_some() { + p!(out, ctx, str(".")); + } + p!(out, ctx, str("[") {i.index()} str("]")); + } + Self::ExprSlice(s) => { + p!(out, ctx, {s.base()} {s.slice_desc()}); + } + Self::ExprCall(c) => { + p!(out, ctx, {c.callee()} {c.args_desc()}); + if c.tailstrict_kw_token().is_some() { + p!(out, ctx, str(" tailstrict")); + } + } + Self::ExprObjExtend(ex) => { + p!(out, ctx, {ex.lhs_work()} str(" 
") {ex.rhs_work()}); + } + Self::ExprParened(par) => { + p!(out, ctx, str("(") {par.expr()} str(")")); + } + Self::ExprString(s) => p!(out, ctx, { s.text() }), + Self::ExprNumber(n) => p!(out, ctx, { n.number() }), + Self::ExprArray(a) => { + let (children, end_comments) = children_between::( + a.syntax().clone(), + a.l_brack_token().map(Into::into).as_ref(), + a.r_brack_token().map(Into::into).as_ref(), + None, + ); + if children.is_empty() && end_comments.is_empty() { + if ctx.opts.pad_arrays { + p!(out, ctx, str("[ ]")); + } else { + p!(out, ctx, str("[]")); + } + return; + } + p!(out, ctx, str("[") >i nl); + for (i, el) in children.into_iter().enumerate() { + ctx.emit_blank_lines(if i == 0 { 0 } else { el.extra_newlines_before }, out); + format_comments(&el.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {el.value} str(",")); + format_comments(&el.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, nl); + } + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + p!(out, ctx, { + p!(out, ctx, { obj.obj_body() }); + } + Self::ExprArrayComp(arr) => { + p!(out, ctx, str("[") {arr.expr()}); + for spec in arr.comp_specs() { + p!(out, ctx, str(" ") {spec}); + } + p!(out, ctx, str("]")); + } + Self::ExprImport(v) => { + p!(out, ctx, {v.import_kind()} str(" ") {v.text()}); + } + Self::ExprVar(n) => p!(out, ctx, { n.name() }), + // Self::ExprLocal(l) => { + // } + Self::ExprIfThenElse(ite) => { + p!(out, ctx, str("if ") {ite.cond()} str(" then ") {ite.then().map(|t| t.expr())}); + if ite.else_kw_token().is_some() || ite.else_().is_some() { + p!(out, ctx, str(" else ") {ite.else_().map(|t| t.expr())}); + } + } + Self::ExprFunction(f) => p!(out, ctx, str("function") {f.params_desc()} nl {f.expr()}), + // Self::ExprAssert(a) => p!(new: {a.assertion()} str("; ") {a.expr()}), + Self::ExprError(e) => p!(out, ctx, str("error ") {e.expr()}), + 
Self::ExprLiteral(l) => { + p!(out, ctx, { l.literal() }); + } + } + } +} + +impl Printable for SourceFile { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + let before = trivia_before( + self.syntax().clone(), + self.expr() + .map(|e| e.syntax().clone()) + .map(Into::into) + .as_ref(), + ); + let after = trivia_after( + self.syntax().clone(), + self.expr() + .map(|e| e.syntax().clone()) + .map(Into::into) + .as_ref(), + ); + format_comments(&before, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {self.expr()} nl); + format_comments(&after, CommentLocation::EndOfItems, out, ctx); + } +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_comments.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_comments.snap new file mode 100644 index 00000000..40885a7f --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_comments.snap @@ -0,0 +1,10 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat(indoc!(\"[\n\t\t\t// comment before\n\t\t\t1,\n\t\t\t2, // inline comment\n\t\t\t// trailing comment\n\t\t]\"))" +--- +[ + // comment before + 1, + 2, // inline comment + // trailing comment +] diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_elements.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_elements.snap new file mode 100644 index 00000000..d5f0a812 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_elements.snap @@ -0,0 +1,9 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat(\"[1, 2, 3]\")" +--- +[ + 1, + 2, + 3, +] diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_hash_to_slash.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_hash_to_slash.snap new file mode 100644 index 00000000..66489bba --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_hash_to_slash.snap @@ -0,0 
+1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(indoc!(\"{\n\t\t\t\t# hash comment\n\t\t\t\ta: 1,\n\t\t\t}\"),\nFormatOptions { comment_style: CommentStyle::Slash, ..Default::default() })" +--- +{ + // hash comment + a: 1, +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_leave.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_leave.snap new file mode 100644 index 00000000..296c74aa --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_leave.snap @@ -0,0 +1,10 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(indoc!(\"{\n\t\t\t\t# hash stays hash\n\t\t\t\ta: 1,\n\t\t\t\t// slash stays slash\n\t\t\t\tb: 2,\n\t\t\t}\"),\nFormatOptions { comment_style: CommentStyle::Leave, ..Default::default() })" +--- +{ + # hash stays hash + a: 1, + // slash stays slash + b: 2, +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_slash_to_hash.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_slash_to_hash.snap new file mode 100644 index 00000000..716b258b --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_slash_to_hash.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(indoc!(\"{\n\t\t\t\t// slash comment\n\t\t\t\ta: 1,\n\t\t\t}\"),\nFormatOptions { comment_style: CommentStyle::Hash, ..Default::default() })" +--- +{ + # slash comment + a: 1, +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_no_padding.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_no_padding.snap new file mode 100644 index 00000000..b0164a50 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_no_padding.snap @@ -0,0 +1,5 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat(\"[]\")" +--- +[] diff --git 
a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_with_padding.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_with_padding.snap new file mode 100644 index 00000000..8b40d00a --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_with_padding.snap @@ -0,0 +1,5 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(\"[]\", FormatOptions\n{ pad_arrays: true, ..Default::default() })" +--- +[ ] diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_default_padding.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_default_padding.snap new file mode 100644 index 00000000..a7a3fb7a --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_default_padding.snap @@ -0,0 +1,5 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat(\"{}\")" +--- +{ } diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_no_padding.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_no_padding.snap new file mode 100644 index 00000000..49a952c4 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_no_padding.snap @@ -0,0 +1,5 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(\"{}\", FormatOptions\n{ pad_objects: false, ..Default::default() })" +--- +{} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_double_to_single.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_double_to_single.snap new file mode 100644 index 00000000..c541040e --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_double_to_single.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(r#\"{ a: \"hello\", b: \"world\" }\"#, FormatOptions\n{ string_style: StringStyle::Single, ..Default::default() })" +--- +{ 
+ a: 'hello', + b: 'world', +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_leave.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_leave.snap new file mode 100644 index 00000000..7c1a6bf4 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_leave.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(r#\"{ a: \"double\", b: 'single' }\"#, FormatOptions\n{ string_style: StringStyle::Leave, ..Default::default() })" +--- +{ + a: "double", + b: 'single', +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_single_to_double.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_single_to_double.snap new file mode 100644 index 00000000..ea466526 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_single_to_double.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(\"{ a: 'hello', b: 'world' }\", FormatOptions\n{ string_style: StringStyle::Double, ..Default::default() })" +--- +{ + a: "hello", + b: "world", +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_with_escapes.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_with_escapes.snap new file mode 100644 index 00000000..e141d89a --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_with_escapes.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(r#\"{ a: \"it's a test\", b: 'say \"hello\"' }\"#, FormatOptions\n{ string_style: StringStyle::Single, ..Default::default() })" +--- +{ + a: 'it\'s a test', + b: 'say "hello"', +} diff --git a/cmds/jrsonnet-fmt/src/tests.rs b/cmds/jrsonnet-fmt/src/tests.rs index eb19a3f0..940cf938 100644 --- a/cmds/jrsonnet-fmt/src/tests.rs +++ b/cmds/jrsonnet-fmt/src/tests.rs @@ -1,15 +1,24 @@ use 
dprint_core::formatting::{PrintItems, PrintOptions}; use indoc::indoc; +use rstest::rstest; -use crate::Printable; +use crate::{ + context::{CommentStyle, FormatContext, FormatOptions, StringStyle}, + printable::Printable, +}; fn reformat(input: &str) -> String { + reformat_with_opts(input, FormatOptions::default()) +} + +fn reformat_with_opts(input: &str, opts: FormatOptions) -> String { let (source, _) = jrsonnet_rowan_parser::parse(input); + let ctx = FormatContext::new(opts); dprint_core::formatting::format( || { let mut out = PrintItems::new(); - source.print(&mut out); + source.print(&mut out, &ctx); out }, PrintOptions { @@ -77,3 +86,198 @@ fn complex_comments_snapshot() { }" ))); } + +#[test] +fn empty_array_no_padding() { + insta::assert_snapshot!(reformat("[]")); +} + +#[test] +fn empty_array_with_padding() { + insta::assert_snapshot!(reformat_with_opts( + "[]", + FormatOptions { + pad_arrays: true, + ..Default::default() + } + )); +} + +#[test] +fn empty_object_default_padding() { + insta::assert_snapshot!(reformat("{}")); +} + +#[test] +fn empty_object_no_padding() { + insta::assert_snapshot!(reformat_with_opts( + "{}", + FormatOptions { + pad_objects: false, + ..Default::default() + } + )); +} + +#[test] +fn array_with_elements() { + insta::assert_snapshot!(reformat("[1, 2, 3]")); +} + +#[test] +fn array_with_comments() { + insta::assert_snapshot!(reformat(indoc!( + "[ + // comment before + 1, + 2, // inline comment + // trailing comment + ]" + ))); +} + +#[test] +fn comment_style_hash_to_slash() { + insta::assert_snapshot!(reformat_with_opts( + indoc!( + "{ + # hash comment + a: 1, + }" + ), + FormatOptions { + comment_style: CommentStyle::Slash, + ..Default::default() + } + )); +} + +#[test] +fn comment_style_slash_to_hash() { + insta::assert_snapshot!(reformat_with_opts( + indoc!( + "{ + // slash comment + a: 1, + }" + ), + FormatOptions { + comment_style: CommentStyle::Hash, + ..Default::default() + } + )); +} + +#[test] +fn comment_style_leave() { 
+ insta::assert_snapshot!(reformat_with_opts( + indoc!( + "{ + # hash stays hash + a: 1, + // slash stays slash + b: 2, + }" + ), + FormatOptions { + comment_style: CommentStyle::Leave, + ..Default::default() + } + )); +} + +#[test] +fn string_style_single_to_double() { + insta::assert_snapshot!(reformat_with_opts( + "{ a: 'hello', b: 'world' }", + FormatOptions { + string_style: StringStyle::Double, + ..Default::default() + } + )); +} + +#[test] +fn string_style_double_to_single() { + insta::assert_snapshot!(reformat_with_opts( + r#"{ a: "hello", b: "world" }"#, + FormatOptions { + string_style: StringStyle::Single, + ..Default::default() + } + )); +} + +#[test] +fn string_style_with_escapes() { + // String with quotes that need escaping when converted + insta::assert_snapshot!(reformat_with_opts( + r#"{ a: "it's a test", b: 'say "hello"' }"#, + FormatOptions { + string_style: StringStyle::Single, + ..Default::default() + } + )); +} + +#[test] +fn string_style_leave() { + insta::assert_snapshot!(reformat_with_opts( + r#"{ a: "double", b: 'single' }"#, + FormatOptions { + string_style: StringStyle::Leave, + ..Default::default() + } + )); +} + +#[rstest] +#[case( + 0, + indoc!( + "{ + a: 1, + b: 2, + }" + ) +)] +#[case( + 1, + indoc!( + "{ + a: 1, + + b: 2, + }" + ) +)] +#[case( + 2, + indoc!( + "{ + a: 1, + + + b: 2, + }" + ) +)] +fn max_blank_lines_limits_preserved_spacing(#[case] max_blank_lines: u8, #[case] expected: &str) { + let formatted = reformat_with_opts( + indoc!( + "{ + a: 1, + + + + b: 2, + }" + ), + FormatOptions { + max_blank_lines, + ..FormatOptions::default() + }, + ); + + assert_eq!(formatted, format!("{expected}\n")); +} diff --git a/cmds/jrsonnet-lsp/Cargo.toml b/cmds/jrsonnet-lsp/Cargo.toml new file mode 100644 index 00000000..dbfa9a29 --- /dev/null +++ b/cmds/jrsonnet-lsp/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "jrsonnet-lsp-bin" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true 
+version.workspace = true + +[[bin]] +name = "jrsonnet-lsp" +path = "src/main.rs" + +[dependencies] +jrsonnet-lsp = { path = "../../crates/jrsonnet-lsp" } +clap = { version = "4.5", features = ["derive"] } +anyhow = "1.0" +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } + +[lints] +workspace = true diff --git a/cmds/jrsonnet-lsp/src/main.rs b/cmds/jrsonnet-lsp/src/main.rs new file mode 100644 index 00000000..c2775b59 --- /dev/null +++ b/cmds/jrsonnet-lsp/src/main.rs @@ -0,0 +1,47 @@ +//! Jsonnet Language Server binary. +//! +//! This is the CLI entry point for the jrsonnet language server. +//! The server communicates over stdio using the Language Server Protocol. + +use anyhow::Result; +use clap::Parser; +use tracing::Level; +use tracing_subscriber::EnvFilter; + +#[derive(Parser, Debug)] +#[command(name = "jrsonnet-lsp")] +#[command(about = "Jsonnet Language Server")] +#[command(version)] +struct Args { + /// Log level (trace, debug, info, warn, error) + #[arg(long, default_value = "info")] + log_level: Level, + + /// Log file path (logs to stderr if not specified) + #[arg(long)] + log_file: Option, +} + +fn main() -> Result<()> { + let args = Args::parse(); + + // Set up logging + let filter = EnvFilter::from_default_env().add_directive(args.log_level.into()); + + if let Some(log_file) = args.log_file { + let file = std::fs::File::create(&log_file)?; + tracing_subscriber::fmt() + .with_env_filter(filter) + .with_writer(file) + .with_ansi(false) + .init(); + } else { + tracing_subscriber::fmt() + .with_env_filter(filter) + .with_writer(std::io::stderr) + .init(); + } + + // Run the LSP server + jrsonnet_lsp::run_stdio() +} diff --git a/cmds/rtk/src/commands/apply.rs b/cmds/rtk/src/commands/apply.rs index 4e85c2b0..4abf7cb2 100644 --- a/cmds/rtk/src/commands/apply.rs +++ b/cmds/rtk/src/commands/apply.rs @@ -10,15 +10,16 @@ use clap::{Args, ValueEnum}; use serde::{Deserialize, Serialize}; use tracing::instrument; -use 
super::diff::ColorMode; -use super::util::{ - build_eval_opts, create_tokio_runtime, extract_manifests, get_or_create_connection, - process_manifests, prompt_confirmation, setup_diff_engine, validate_dry_run, DiffEngineConfig, - UnimplementedArgs, -}; - // Re-export AutoApprove for backwards compatibility pub use super::util::AutoApprove; +use super::{ + diff::ColorMode, + util::{ + build_eval_opts, create_tokio_runtime, extract_manifests, get_or_create_connection, + process_manifests, prompt_confirmation, setup_diff_engine, validate_dry_run, + DiffEngineConfig, UnimplementedArgs, + }, +}; use crate::{ eval::EvalOpts, k8s::{ diff --git a/cmds/rtk/src/commands/prune.rs b/cmds/rtk/src/commands/prune.rs index f1b1a33d..d00e4581 100644 --- a/cmds/rtk/src/commands/prune.rs +++ b/cmds/rtk/src/commands/prune.rs @@ -9,15 +9,16 @@ use anyhow::{Context, Result}; use clap::Args; use tracing::instrument; -use super::diff::ColorMode; -use super::util::{ - build_eval_opts, create_tokio_runtime, extract_manifests, get_or_create_connection, - process_manifests, prompt_confirmation, setup_diff_engine, validate_dry_run, DiffEngineConfig, - UnimplementedArgs, -}; - // Re-export AutoApprove for backwards compatibility pub use super::util::AutoApprove; +use super::{ + diff::ColorMode, + util::{ + build_eval_opts, create_tokio_runtime, extract_manifests, get_or_create_connection, + process_manifests, prompt_confirmation, setup_diff_engine, validate_dry_run, + DiffEngineConfig, UnimplementedArgs, + }, +}; use crate::{ eval::EvalOpts, k8s::{ diff --git a/cmds/rtk/src/eval.rs b/cmds/rtk/src/eval.rs index 4628c7d6..da9c302d 100644 --- a/cmds/rtk/src/eval.rs +++ b/cmds/rtk/src/eval.rs @@ -7,9 +7,8 @@ use std::{collections::HashMap, fs, path::Path}; use anyhow::{Context, Result}; use jrsonnet_evaluator::{ - function::TlaArg, gc::GcHashMap, set_lenient_super, set_skip_assertions, - stack::set_stack_depth_limit, trace::PathResolver, FileImportResolver, IStr, ImportResolver, - State, + 
function::TlaArg, gc::GcHashMap, stack::set_stack_depth_limit, trace::PathResolver, + FileImportResolver, IStr, ImportResolver, State, }; use jrsonnet_stdlib::ContextInitializer; use tracing::instrument; @@ -120,12 +119,6 @@ pub fn eval_with_resolver( spec: Option, opts: EvalOpts, ) -> Result { - set_skip_assertions(false); - - // Enable lenient super mode to handle mixins that reference super fields that don't exist yet - // This works around go-jsonnet compatibility issues in libraries like k8s-libsonnet - set_lenient_super(true); - // Set up the evaluator state let state = setup_state(import_resolver, config_base, &spec, &opts)?; @@ -200,7 +193,7 @@ fn setup_state( } } - apply_rtk_config(&context_init, &config); + let use_go_style_floats = apply_rtk_config(&context_init, &config); // Add external variables from spec (environment config) if let Some(env) = spec { @@ -232,7 +225,10 @@ fn setup_state( let mut builder = State::builder(); builder .import_resolver(import_resolver) - .context_initializer(context_init); + .context_initializer(context_init) + .lenient_super(true) + .skip_assertions(false) + .use_go_style_floats(use_go_style_floats); // Set max stack if specified - must be done before building state if let Some(max_stack) = opts.max_stack { @@ -245,8 +241,7 @@ fn setup_state( } /// Apply settings from .rtk-config.yaml to the context initializer -fn apply_rtk_config(context_init: &ContextInitializer, config: &RtkConfig) { - use jrsonnet_evaluator::manifest::set_use_go_style_floats; +fn apply_rtk_config(context_init: &ContextInitializer, config: &RtkConfig) -> bool { use jrsonnet_stdlib::{ ManifestYamlDocFormatting, ManifestYamlStreamEmptyBehavior, ManifestYamlStreamFormatting, QuoteValuesBehavior, @@ -280,7 +275,7 @@ fn apply_rtk_config(context_init: &ContextInitializer, config: &RtkConfig) { Some(JsonnetImplementation::Jrsonnet) => false, Some(JsonnetImplementation::GoJsonnet) | None => true, }; - set_use_go_style_floats(use_go_style); + use_go_style } 
/// Register Tanka-compatible native functions @@ -381,8 +376,8 @@ main{}{} let result = apply_tla(state, result, opts)?; // Manifest the result to JSON - let manifest = result - .manifest(jrsonnet_evaluator::manifest::JsonFormat::default()) + let manifest = state + .with_behavior(|| result.manifest(jrsonnet_evaluator::manifest::JsonFormat::default())) .map_err(|e| anyhow::anyhow!("manifest error:\n{}", e))?; Ok(manifest.to_string()) diff --git a/cmds/rtk/src/tanka.rs b/cmds/rtk/src/tanka.rs index edb0e0c8..1bf11a0f 100644 --- a/cmds/rtk/src/tanka.rs +++ b/cmds/rtk/src/tanka.rs @@ -3,7 +3,7 @@ // Tanka-compatible API accessible via std.native() use std::{ - collections::HashMap, + collections::{HashMap, HashSet}, io::{BufReader, Read, Write}, process::{Command, Stdio}, rc::Rc, @@ -52,7 +52,7 @@ fn get_helm_cache() -> &'static RwLock>> { /// Generate a key for a manifest using the nameFormat template /// This is a simplified implementation that handles the common case where nameFormat /// includes namespace in the key format -fn generate_manifest_key_from_val(val: &Val, name_format: Option<&str>) -> Result { +fn generate_manifest_key_from_val(val: &Val, name_format: Option<&str>) -> String { // Check if we should use nameFormat or default format let use_namespace_in_key = name_format .map(|fmt| fmt.contains("metadata.namespace") || fmt.contains(".or .metadata.namespace")) @@ -94,14 +94,13 @@ fn generate_manifest_key_from_val(val: &Val, name_format: Option<&str>) -> Resul }) .unwrap_or_else(|| "cluster".to_string()); - return Ok(format!("{}_{}_{}", namespace, kind, name)); - } else { - return Ok(format!("{}_{}", kind, name)); + return format!("{}_{}_{}", namespace, kind, name); } + return format!("{}_{}", kind, name); } } - Ok("unknown".to_string()) + "unknown".to_string() } /// Parse YAML output from helm into a Val object @@ -118,7 +117,7 @@ fn parse_helm_yaml_output(yaml_content: &str, name_format: Option<&str>) -> Resu }; let documents: Vec = 
serde_saphyr::from_multiple_with_options(yaml_content, options) .map_err(|e| RuntimeError(format!("failed to parse helm output: {e}").into()))?; - let mut seen_keys = HashMap::new(); + let mut seen_keys = HashSet::new(); for val in documents { // Skip null documents @@ -132,16 +131,16 @@ fn parse_helm_yaml_output(yaml_content: &str, name_format: Option<&str>) -> Resu } // Use the nameFormat-aware key generation - let key = generate_manifest_key_from_val(&val, name_format)?; + let key = generate_manifest_key_from_val(&val, name_format); // Check for duplicate keys and add counter if needed let mut final_key = key.clone(); let mut counter = 2; - while seen_keys.contains_key(&final_key) { + while seen_keys.contains(&final_key) { final_key = format!("{}_{}", key, counter); counter += 1; } - seen_keys.insert(final_key.clone(), ()); + seen_keys.insert(final_key.clone()); builder.field(&final_key).try_value(val)?; } @@ -184,7 +183,8 @@ fn helm_cache_key( /// Matches Go Tanka's naming behavior which inserts underscores: /// - Before uppercase letters (CamelCase -> camel_case) /// - Between letter-digit-letter sequences (k8s -> k_8s) -/// Note: Does NOT insert underscore when digit is at word boundary (flux2 stays flux2) +/// +/// Note: Does NOT insert underscore when digit is at word boundary (flux2 stays flux2). fn to_snake_case(s: &str) -> String { let mut result = String::new(); let chars: Vec = s.chars().collect(); @@ -211,10 +211,9 @@ fn to_snake_case(s: &str) -> String { // Look ahead past all consecutive digits to see if there's a letter let has_letter_after_digits = chars[i..] 
.iter() - .skip_while(|c| c.is_ascii_digit()) - .next() - .map(|c| c.is_ascii_alphabetic()) - .unwrap_or(false); + .find(|c| !c.is_ascii_digit()) + .copied() + .is_some_and(|c| c.is_ascii_alphabetic()); if has_letter_after_digits { result.push('_'); } @@ -330,14 +329,13 @@ fn yaml_v3_key_compare(a: &str, b: &str) -> std::cmp::Ordering { } else { std::cmp::Ordering::Greater }; - } else { - // Not after digits: non-letters come first - return if bl { - std::cmp::Ordering::Less - } else { - std::cmp::Ordering::Greater - }; } + // Not after digits: non-letters come first + return if bl { + std::cmp::Ordering::Less + } else { + std::cmp::Ordering::Greater + }; } // Both are non-letters - check for numeric sequences @@ -404,7 +402,7 @@ pub fn builtin_tanka_manifest_yaml_from_json(json: String) -> Result { empty_array_as_brackets: true, block_scalar_indent_in_seq: Some(2), // 2 spaces absolute for block scalar body in arrays line_width: None, // go-yaml v3's Marshal() doesn't wrap lines by default - scientific_notation_threshold: Some(1000000), // 1 million - large numbers use scientific notation + scientific_notation_threshold: Some(1_000_000), // 1 million - large numbers use scientific notation scientific_notation_small_threshold: Some(0.0001), // 1e-4 - small numbers use scientific notation (Go yaml.v3) quote_numeric_strings: true, // Quote numeric string keys like "12345" ..Default::default() @@ -834,7 +832,7 @@ pub fn builtin_tanka_kustomize_build(path: String, opts: ObjValue) -> Result = serde_saphyr::from_multiple_with_options(&yaml_content, options) .map_err(|e| RuntimeError(format!("failed to parse kustomize output: {e}").into()))?; - let mut seen_keys = HashMap::new(); + let mut seen_keys = HashSet::new(); for val in documents { // Skip null documents @@ -873,11 +871,11 @@ pub fn builtin_tanka_kustomize_build(path: String, opts: ObjValue) -> Result Self { + Self { + skip_assertions: false, + lenient_super: false, + use_go_style_floats: true, + } + } +} + 
+thread_local! { + static ACTIVE_BEHAVIOR: Cell = const { Cell::new(EvaluationBehavior { + skip_assertions: false, + lenient_super: false, + use_go_style_floats: true, + }) }; + static ACTIVE_IMPORT_STACK: RefCell> = const { RefCell::new(Vec::new()) }; +} + +pub(crate) fn active_behavior() -> EvaluationBehavior { + ACTIVE_BEHAVIOR.with(Cell::get) +} + +pub(crate) fn with_active_behavior( + behavior: EvaluationBehavior, + f: impl FnOnce() -> Result, +) -> Result { + struct ResetBehavior(EvaluationBehavior); + impl Drop for ResetBehavior { + fn drop(&mut self) { + ACTIVE_BEHAVIOR.with(|slot| slot.set(self.0)); + } + } + + let prev = ACTIVE_BEHAVIOR.with(|slot| { + let prev = slot.get(); + slot.set(behavior); + prev + }); + let _reset = ResetBehavior(prev); + f() +} + +#[derive(Clone, Copy, Debug, Trace, PartialEq, Eq)] +enum ImportEvaluationState { + Idle, + Evaluating, +} + +fn import_cycle_description(cycle: &[SourcePath]) -> String { + let chain = cycle + .iter() + .map(ToString::to_string) + .collect::>() + .join(" -> "); + format!("import cycle detected: {chain}") +} + +struct ImportEvaluationGuard { + state: State, + path: SourcePath, +} + +impl ImportEvaluationGuard { + fn enter(state: &State, path: &SourcePath, file: &mut FileData) -> Result { + let cycle = ACTIVE_IMPORT_STACK.with(|stack| { + let stack = stack.borrow(); + stack + .iter() + .position(|stack_path| stack_path == path) + .map(|cycle_start| { + let mut cycle = stack[cycle_start..].to_vec(); + cycle.push(path.clone()); + cycle + }) + }); + if let Some(cycle) = cycle { + let mut err = Error::from(InfiniteRecursionDetected); + err.trace_mut().0.push(error::StackTraceElement { + location: None, + desc: import_cycle_description(&cycle), + }); + return Err(err); + } + + if matches!(file.import_state, ImportEvaluationState::Evaluating) { + bail!(InfiniteRecursionDetected); + } + + ACTIVE_IMPORT_STACK.with(|stack| stack.borrow_mut().push(path.clone())); + file.import_state = 
ImportEvaluationState::Evaluating; + Ok(Self { + state: state.clone(), + path: path.clone(), + }) + } +} + +impl Drop for ImportEvaluationGuard { + fn drop(&mut self) { + ACTIVE_IMPORT_STACK.with(|stack| { + let mut stack = stack.borrow_mut(); + let popped = stack.pop(); + debug_assert_eq!(popped.as_ref(), Some(&self.path)); + }); + + let mut file_cache = self.state.file_cache(); + if let RawEntryMut::Occupied(mut file) = file_cache.raw_entry_mut().from_key(&self.path) { + file.get_mut().import_state = ImportEvaluationState::Idle; + } + } +} + /// Thunk without bound `super`/`this` /// object inheritance may be overriden multiple times, and will be fixed only on field read pub trait Unbound: Trace { @@ -177,7 +297,7 @@ struct FileData { parsed: Option, evaluated: Option, - evaluating: bool, + import_state: ImportEvaluationState, } impl FileData { fn new_string(data: IStr) -> Self { @@ -186,7 +306,7 @@ impl FileData { bytes: None, parsed: None, evaluated: None, - evaluating: false, + import_state: ImportEvaluationState::Idle, } } fn new_bytes(data: IBytes) -> Self { @@ -195,7 +315,7 @@ impl FileData { bytes: Some(data), parsed: None, evaluated: None, - evaluating: false, + import_state: ImportEvaluationState::Idle, } } pub(crate) fn get_string(&mut self) -> Option { @@ -221,6 +341,7 @@ pub struct EvaluationStateInternals { context_initializer: TraceBox, /// Used to resolve file locations/contents import_resolver: TraceBox, + behavior: EvaluationBehavior, } /// Maintains stack trace and import resolution @@ -321,33 +442,23 @@ impl State { ); } let parsed = file.parsed.as_ref().expect("just set").clone(); - // RELAXED: Allow re-importing files during evaluation to support lazy evaluation patterns. - // In Jsonnet, it's valid to have apparent "circular" imports as long as they're in lazy - // thunks that don't get evaluated. 
For example: - // { value: if cond then (import 'self.libsonnet').other else 42 } - // The original check was too strict and prevented legitimate patterns that Go Tanka handles. - // Real infinite recursion is still caught by: - // 1. Thunk Pending state (val.rs:105) - // 2. Stack depth limits (stack.rs) - // 3. Pending value access (dynamic.rs:44) - // if file.evaluating { - // bail!(InfiniteRecursionDetected) - // } - file.evaluating = true; - // Dropping file cache guard here, as evaluation may use this map too - drop(file_cache); - let res = evaluate(self.create_default_context(file_name), &parsed); - - let mut file_cache = self.file_cache(); - let mut file = file_cache.raw_entry_mut().from_key(&path); - - let RawEntryMut::Occupied(file) = &mut file else { - unreachable!("this file was just here!") + let res = { + let _import_guard = ImportEvaluationGuard::enter(self, &path, file)?; + // Drop file cache guard here, as evaluation may use this map too. + drop(file_cache); + with_active_behavior(self.behavior(), || { + evaluate(self.create_default_context(file_name), &parsed) + }) }; - let file = file.get_mut(); - file.evaluating = false; + match res { Ok(v) => { + let mut file_cache = self.file_cache(); + let mut file = file_cache.raw_entry_mut().from_key(&path); + let RawEntryMut::Occupied(file) = &mut file else { + unreachable!("this file was just here!") + }; + let file = file.get_mut(); file.evaluated = Some(v.clone()); Ok(v) } @@ -443,7 +554,9 @@ impl State { path: source.clone(), error: Box::new(e), })?; - evaluate(self.create_default_context(source), &parsed) + with_active_behavior(self.behavior(), || { + evaluate(self.create_default_context(source), &parsed) + }) } /// Parses and evaluates the given snippet with custom context modifier pub fn evaluate_snippet_with( @@ -464,10 +577,12 @@ impl State { path: source.clone(), error: Box::new(e), })?; - evaluate( - self.create_default_context_with(source, context_initializer), - &parsed, - ) + 
with_active_behavior(self.behavior(), || { + evaluate( + self.create_default_context_with(source, context_initializer), + &parsed, + ) + }) } } @@ -490,6 +605,12 @@ impl State { pub fn context_initializer(&self) -> &dyn ContextInitializer { &*self.0.context_initializer } + pub fn behavior(&self) -> EvaluationBehavior { + self.0.behavior + } + pub fn with_behavior(&self, f: impl FnOnce() -> Result) -> Result { + with_active_behavior(self.behavior(), f) + } } impl State { @@ -508,6 +629,7 @@ impl Default for State { pub struct StateBuilder { import_resolver: Option>, context_initializer: Option>, + behavior: EvaluationBehavior, } impl StateBuilder { pub fn import_resolver(&mut self, import_resolver: impl ImportResolver) -> &mut Self { @@ -521,6 +643,22 @@ impl StateBuilder { let _ = self.context_initializer.insert(tb!(context_initializer)); self } + pub fn behavior(&mut self, behavior: EvaluationBehavior) -> &mut Self { + self.behavior = behavior; + self + } + pub fn skip_assertions(&mut self, skip_assertions: bool) -> &mut Self { + self.behavior.skip_assertions = skip_assertions; + self + } + pub fn lenient_super(&mut self, lenient_super: bool) -> &mut Self { + self.behavior.lenient_super = lenient_super; + self + } + pub fn use_go_style_floats(&mut self, use_go_style_floats: bool) -> &mut Self { + self.behavior.use_go_style_floats = use_go_style_floats; + self + } pub fn build(mut self) -> State { State(Cc::new(EvaluationStateInternals { file_cache: RefCell::new(GcHashMap::new()), @@ -529,6 +667,215 @@ impl StateBuilder { .import_resolver .take() .unwrap_or_else(|| tb!(DummyImportResolver)), + behavior: self.behavior, })) } } + +#[cfg(test)] +mod tests { + use std::{ + fs, + path::{Path, PathBuf}, + process, + sync::{Arc, Barrier}, + thread, + time::{SystemTime, UNIX_EPOCH}, + }; + + use super::*; + use crate::error::ErrorKind; + + struct TempFixture { + root: PathBuf, + } + + impl TempFixture { + fn new(prefix: &str) -> Self { + let nanos = SystemTime::now() + 
.duration_since(UNIX_EPOCH) + .expect("clock should be monotonic") + .as_nanos(); + let root = std::env::temp_dir().join(format!( + "jrsonnet-evaluator-{prefix}-{}-{nanos}", + process::id() + )); + fs::create_dir_all(&root).expect("fixture dir should be created"); + Self { root } + } + + fn write(&self, relative: &str, contents: &str) { + let path = self.root.join(relative); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).expect("fixture parent dir should be created"); + } + fs::write(path, contents).expect("fixture file should be written"); + } + + fn path(&self, relative: &str) -> PathBuf { + self.root.join(relative) + } + } + + impl Drop for TempFixture { + fn drop(&mut self) { + let _ = fs::remove_dir_all(&self.root); + } + } + + fn state_with_file_imports(root: &Path) -> State { + let mut builder = State::builder(); + builder.import_resolver(FileImportResolver::new(vec![root.to_path_buf()])); + builder.build() + } + + fn expect_infinite_recursion(err: &Error) { + assert!( + matches!(err.error(), ErrorKind::InfiniteRecursionDetected), + "expected InfiniteRecursionDetected, got: {err}" + ); + } + + fn expect_cycle_trace_path_order(err: &Error, path_order: &[&str]) { + let Some(cycle_trace) = err + .trace() + .0 + .iter() + .find(|frame| frame.desc.starts_with("import cycle detected:")) + else { + panic!("expected import cycle trace frame, got: {err}"); + }; + + let mut cursor = 0usize; + for path_fragment in path_order { + let haystack = &cycle_trace.desc[cursor..]; + let Some(rel_pos) = haystack.find(path_fragment) else { + panic!( + "expected path fragment {path_fragment:?} in cycle trace {:?}", + cycle_trace.desc + ); + }; + cursor += rel_pos + path_fragment.len(); + } + } + + #[test] + fn import_cycle_reports_infinite_recursion() { + let fixture = TempFixture::new("cycle-top-level"); + fixture.write("a.jsonnet", "(import 'b.jsonnet') + 1"); + fixture.write("b.jsonnet", "(import 'a.jsonnet') + 1"); + + let state = 
state_with_file_imports(&fixture.root); + let err = state + .import(fixture.path("a.jsonnet")) + .expect_err("top-level import cycle must fail"); + expect_infinite_recursion(&err); + expect_cycle_trace_path_order(&err, &["a.jsonnet", "b.jsonnet", "a.jsonnet"]); + } + + #[test] + fn lazy_import_cycle_is_allowed_until_forced() { + let fixture = TempFixture::new("cycle-lazy"); + fixture.write( + "a.jsonnet", + r#" +{ + ok: 1, + deferred: (import 'b.jsonnet').deferred, +} +"#, + ); + fixture.write( + "b.jsonnet", + r#" +{ + deferred: (import 'a.jsonnet').deferred, +} +"#, + ); + + let state = state_with_file_imports(&fixture.root); + let root = state + .import(fixture.path("a.jsonnet")) + .expect("lazy cycle should not fail before forcing"); + let Val::Obj(root_obj) = root else { + panic!("expected object root"); + }; + let ok = root_obj + .get("ok".into()) + .expect("reading non-cyclic field should succeed") + .expect("ok field should exist"); + assert_eq!(ok.to_string().expect("number to string").as_str(), "1"); + + let err = root_obj + .get("deferred".into()) + .expect_err("forcing cyclic field should fail"); + expect_infinite_recursion(&err); + } + + #[test] + fn behavior_flags_do_not_leak_across_threads() { + let start = Arc::new(Barrier::new(3)); + + let go_thread = { + let start = Arc::clone(&start); + thread::spawn(move || { + let mut builder = State::builder(); + builder.skip_assertions(false).use_go_style_floats(true); + let state = builder.build(); + start.wait(); + + for _ in 0..64 { + let rendered = state + .evaluate_snippet("", r#""%s" % 0.8"#) + .expect("go-style formatting should evaluate") + .to_string() + .expect("formatted value should render"); + assert_eq!(rendered.as_str(), "0.80000000000000004"); + + let err = state + .evaluate_snippet( + "", + r#"{ assert false : "boom", v: 1 }.v"#, + ) + .expect_err("assert-enabled state should fail"); + assert!(matches!(err.error(), ErrorKind::AssertionFailed(_))); + } + }) + }; + + let jr_thread = { + let 
start = Arc::clone(&start); + thread::spawn(move || { + let mut builder = State::builder(); + builder.skip_assertions(true).use_go_style_floats(false); + let state = builder.build(); + start.wait(); + + for _ in 0..64 { + let rendered = state + .evaluate_snippet("", r#""%s" % 0.8"#) + .expect("jr-style formatting should evaluate") + .to_string() + .expect("formatted value should render"); + assert_eq!(rendered.as_str(), "0.8"); + + let value = state + .evaluate_snippet( + "", + r#"{ assert false : "boom", v: 1 }.v"#, + ) + .expect("assert-skipped state should pass"); + assert_eq!( + value.to_string().expect("value should render").as_str(), + "1" + ); + } + }) + }; + + start.wait(); + go_thread.join().expect("go-style thread should finish"); + jr_thread.join().expect("jr-style thread should finish"); + } +} diff --git a/crates/jrsonnet-evaluator/src/manifest.rs b/crates/jrsonnet-evaluator/src/manifest.rs index e210276c..b85ee6bf 100644 --- a/crates/jrsonnet-evaluator/src/manifest.rs +++ b/crates/jrsonnet-evaluator/src/manifest.rs @@ -1,24 +1,10 @@ -use std::{borrow::Cow, cell::Cell, fmt::Write, ptr}; +use std::{borrow::Cow, fmt::Write, ptr}; -use crate::{bail, in_description_frame, Result, ResultExt, Val}; - -// Thread-local flag to control float formatting style in std.toString -// When true, uses Go's %.17g format (e.g., 0.59999999999999998) -// When false (default), uses Rust's shortest representation (e.g., 0.6) -thread_local! 
{ - static USE_GO_STYLE_FLOATS: Cell = const { Cell::new(false) }; -} - -/// Set whether to use Go-style float formatting in std.toString -/// - true: Use Go's %.17g format (matches go-jsonnet) -/// - false (default): Use Rust's Display (shortest representation, matches jrsonnet binary) -pub fn set_use_go_style_floats(use_go_style: bool) { - USE_GO_STYLE_FLOATS.with(|s| s.set(use_go_style)); -} +use crate::{active_behavior, bail, in_description_frame, Result, ResultExt, Val}; /// Check if Go-style float formatting is enabled pub(crate) fn should_use_go_style_floats() -> bool { - USE_GO_STYLE_FLOATS.with(Cell::get) + active_behavior().use_go_style_floats } /// Format a float like Go's %.17g format @@ -37,7 +23,16 @@ pub(crate) fn format_float_go_g17(v: f64) -> String { v.abs().log10().floor() as i32 }; - if exp < -4 || exp >= 17 { + if (-4..17).contains(&exp) { + // Use decimal notation like %f but with 17 significant digits + // Calculate digits after decimal point needed for 17 sig figs + let digits_after_decimal = (16 - exp).max(0) as usize; + let formatted = format!("{:.prec$}", v, prec = digits_after_decimal); + // Trim trailing zeros but keep at least one digit after decimal if there was one + let trimmed = formatted.trim_end_matches('0'); + let trimmed = trimmed.trim_end_matches('.'); + trimmed.to_string() + } else { // Use scientific notation like %e let formatted = format!("{:.16e}", v); // Parse and clean up: "3.1415926535897930e0" -> "3.141592653589793e0" @@ -53,15 +48,6 @@ pub(crate) fn format_float_go_g17(v: f64) -> String { } else { formatted } - } else { - // Use decimal notation like %f but with 17 significant digits - // Calculate digits after decimal point needed for 17 sig figs - let digits_after_decimal = (16 - exp).max(0) as usize; - let formatted = format!("{:.prec$}", v, prec = digits_after_decimal); - // Trim trailing zeros but keep at least one digit after decimal if there was one - let trimmed = formatted.trim_end_matches('0'); - let 
trimmed = trimmed.trim_end_matches('.'); - trimmed.to_string() } } @@ -278,10 +264,12 @@ fn manifest_json_ex_buf( match mtype { // std.toString uses Go's unparseNumber: %.0f for integers, %.17g for floats // This is critical for config_hash compatibility (std.md5(std.toString(...))) - // The go-style formatting can be disabled via set_use_go_style_floats(false) - // to match upstream jrsonnet binary behavior + // The go-style formatting is configured per evaluator state to match + // go-jsonnet or upstream jrsonnet behavior. ToString => { - if v == v.floor() { + let floor = v.floor(); + let integer_margin = f64::EPSILON * v.abs().max(1.0); + if (v - floor).abs() <= integer_margin { write!(buf, "{:.0}", v).unwrap(); } else if should_use_go_style_floats() { buf.push_str(&format_float_go_g17(v)); @@ -528,10 +516,8 @@ impl ManifestFormat for YamlStreamFormat { } // For jrsonnet empty mode: always add trailing newline // For go-jsonnet mode: only add trailing newline if c_document_end is true - if self.jrsonnet_empty || self.c_document_end { - if self.end_newline { - out.push('\n'); - } + if (self.jrsonnet_empty || self.c_document_end) && self.end_newline { + out.push('\n'); } Ok(()) } diff --git a/crates/jrsonnet-evaluator/src/obj.rs b/crates/jrsonnet-evaluator/src/obj.rs index a58e005b..d0c3a71e 100644 --- a/crates/jrsonnet-evaluator/src/obj.rs +++ b/crates/jrsonnet-evaluator/src/obj.rs @@ -14,8 +14,6 @@ use rustc_hash::FxHashMap; // Thread-local flag to disable assertion checking // This is used to match Go Tanka's behavior of not running assertions during manifest generation thread_local! { - static SKIP_ASSERTIONS: Cell = const { Cell::new(false) }; - static LENIENT_SUPER: Cell = const { Cell::new(false) }; // Counter for how many assertions are currently being evaluated. 
// When inside assertion evaluation (counter > 0), we skip triggering new assertions // on field accesses to prevent infinite recursion when an assertion accesses a field @@ -23,14 +21,9 @@ thread_local! { static ASSERTION_DEPTH: Cell = const { Cell::new(0) }; } -/// Set whether to skip assertion checks (for manifest generation compatibility with Go Tanka) -pub fn set_skip_assertions(skip: bool) { - SKIP_ASSERTIONS.with(|s| s.set(skip)); -} - /// Check if assertions should be skipped fn should_skip_assertions() -> bool { - SKIP_ASSERTIONS.with(std::cell::Cell::get) + crate::active_behavior().skip_assertions } /// Check if we're currently inside assertion evaluation @@ -54,15 +47,9 @@ impl Drop for AssertionGuard { } } -/// Set whether to use lenient mode for super field access (return empty object instead of error) -/// This works around go-jsonnet compatibility issues where mixins reference super fields that don't exist yet -pub fn set_lenient_super(lenient: bool) { - LENIENT_SUPER.with(|s| s.set(lenient)); -} - /// Check if lenient super mode is enabled pub fn should_use_lenient_super() -> bool { - LENIENT_SUPER.with(std::cell::Cell::get) + crate::active_behavior().lenient_super } use crate::{ diff --git a/crates/jrsonnet-evaluator/src/stdlib/format.rs b/crates/jrsonnet-evaluator/src/stdlib/format.rs index eb1d0ebf..745c35e7 100644 --- a/crates/jrsonnet-evaluator/src/stdlib/format.rs +++ b/crates/jrsonnet-evaluator/src/stdlib/format.rs @@ -1,6 +1,8 @@ //! 
faster std.format impl #![allow(clippy::too_many_arguments)] +use std::fmt::Write as _; + use jrsonnet_gcmodule::Trace; use jrsonnet_interner::IStr; use jrsonnet_types::ValType; @@ -532,7 +534,7 @@ pub fn format_code( // Check if it's an integer (no fractional part) if n.fract() == 0.0 && n.abs() < 1e15 { // Format as integer without decimal point - tmp_out.push_str(&format!("{:.0}", n)); + write!(tmp_out, "{n:.0}").unwrap(); } else { // Use Go-style %.17g format if enabled, otherwise use Rust's Display (shortest) if manifest::should_use_go_style_floats() { @@ -541,7 +543,7 @@ pub fn format_code( // Use Rust's Display formatting (ryu algorithm) which produces // the shortest decimal representation, avoiding precision artifacts // like 0.80000000000000004 -> 0.8 - tmp_out.push_str(&format!("{}", n)); + write!(tmp_out, "{n}").unwrap(); } } } else { diff --git a/crates/jrsonnet-evaluator/src/tla.rs b/crates/jrsonnet-evaluator/src/tla.rs index 8e9b36eb..a7685fa5 100644 --- a/crates/jrsonnet-evaluator/src/tla.rs +++ b/crates/jrsonnet-evaluator/src/tla.rs @@ -7,22 +7,24 @@ use crate::{ }; pub fn apply_tla(s: State, args: &A, val: Val) -> Result { - Ok(if let Val::Func(func) = val { - in_description_frame( - || "during TLA call".to_owned(), - || { - func.evaluate( - s.create_default_context(Source::new_virtual( - "".into(), - IStr::empty(), - )), - CallLocation::native(), - args, - false, - ) - }, - )? - } else { - val + s.with_behavior(|| { + Ok(if let Val::Func(func) = val { + in_description_frame( + || "during TLA call".to_owned(), + || { + func.evaluate( + s.create_default_context(Source::new_virtual( + "".into(), + IStr::empty(), + )), + CallLocation::native(), + args, + false, + ) + }, + )? 
+ } else { + val + }) }) } diff --git a/crates/jrsonnet-lsp-check/Cargo.toml b/crates/jrsonnet-lsp-check/Cargo.toml new file mode 100644 index 00000000..3ebe9344 --- /dev/null +++ b/crates/jrsonnet-lsp-check/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "jrsonnet-lsp-check" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Type checking and linting for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-lsp-inference = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-inference" } +jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } +jrsonnet-lsp-stdlib = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-stdlib" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +lsp-types.workspace = true +rowan.workspace = true +rustc-hash.workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +rstest = "0.23" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-check/src/diagnostic.rs b/crates/jrsonnet-lsp-check/src/diagnostic.rs new file mode 100644 index 00000000..ec091b59 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/diagnostic.rs @@ -0,0 +1,406 @@ +//! Unified diagnostic handling for Jsonnet LSP. +//! +//! Provides a consistent error code system and diagnostic builder for +//! type checking, linting, and format checking. + +use jrsonnet_lsp_document::LspRange; +use lsp_types::{DiagnosticRelatedInformation, DiagnosticSeverity, Location, Uri}; + +/// Severity level for diagnostics. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum Severity { + Error, + Warning, + Information, + Hint, +} + +impl From for DiagnosticSeverity { + fn from(severity: Severity) -> Self { + match severity { + Severity::Error => DiagnosticSeverity::ERROR, + Severity::Warning => DiagnosticSeverity::WARNING, + Severity::Information => DiagnosticSeverity::INFORMATION, + Severity::Hint => DiagnosticSeverity::HINT, + } + } +} + +/// Diagnostic error code for programmatic handling. +/// +/// Error codes are grouped by category: +/// - `E0xx`: Type errors +/// - `E1xx`: Scope errors +/// - `E2xx`: Format string errors +/// - `W0xx`: Lint warnings +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ErrorCode { + // Type errors (E0xx) + /// Binary operator type mismatch + BinaryOpMismatch, + /// Unary operator type mismatch + UnaryOpMismatch, + /// Field access on non-object + FieldAccessOnNonObject, + /// Index access on non-indexable type + IndexOnNonIndexable, + /// Function call on non-callable + CallOnNonFunction, + /// Wrong argument count + WrongArgCount, + /// Missing required arguments + TooFewArguments, + /// Too many arguments + TooManyArguments, + /// No such field on object + NoSuchField, + /// Tuple index out of bounds + TupleIndexOutOfBounds, + /// Argument type mismatch + ArgumentTypeMismatch, + /// Callback parameter type mismatch + CallbackTypeMismatch, + + // Scope errors (E1xx) + /// Undefined variable + UndefinedVariable, + /// Duplicate variable binding + DuplicateVariable, + /// Duplicate object field + DuplicateField, + /// Duplicate function parameter + DuplicateParameter, + + // Format string errors (E2xx) + /// Invalid format specifier + FormatInvalidSpecifier, + /// Wrong number of format arguments + FormatArgCountMismatch, + /// Format argument type mismatch + FormatTypeMismatch, + + // Lint warnings (W0xx) + /// Variable declared but never used + UnusedVariable, + /// Parameter declared but never used + 
UnusedParameter, + /// Variable shadows another variable + ShadowedVariable, + /// Code after error expression is unreachable + UnreachableCode, +} + +impl ErrorCode { + /// Get the default severity for this error code. + #[must_use] + pub fn default_severity(&self) -> Severity { + match self { + Self::BinaryOpMismatch + | Self::UnaryOpMismatch + | Self::FieldAccessOnNonObject + | Self::IndexOnNonIndexable + | Self::CallOnNonFunction + | Self::WrongArgCount + | Self::TooFewArguments + | Self::TooManyArguments + | Self::NoSuchField + | Self::TupleIndexOutOfBounds + | Self::ArgumentTypeMismatch + | Self::CallbackTypeMismatch + | Self::UndefinedVariable + | Self::DuplicateVariable + | Self::DuplicateField + | Self::DuplicateParameter + | Self::FormatInvalidSpecifier + | Self::FormatArgCountMismatch + | Self::FormatTypeMismatch => Severity::Error, + + // Lint warnings + Self::UnusedVariable | Self::UnusedParameter | Self::ShadowedVariable => { + Severity::Warning + } + + // Hints + Self::UnreachableCode => Severity::Hint, + } + } + + /// Get the string code for LSP diagnostic. + /// + /// Format: `E0xx` for errors, `W0xx` for warnings. 
+ #[must_use] + pub fn as_str(&self) -> &'static str { + match self { + // Type errors + Self::BinaryOpMismatch => "E001", + Self::UnaryOpMismatch => "E002", + Self::FieldAccessOnNonObject => "E003", + Self::IndexOnNonIndexable => "E004", + Self::CallOnNonFunction => "E005", + Self::WrongArgCount => "E006", + Self::TooFewArguments => "E007", + Self::TooManyArguments => "E008", + Self::NoSuchField => "E009", + Self::TupleIndexOutOfBounds => "E010", + Self::ArgumentTypeMismatch => "E011", + Self::CallbackTypeMismatch => "E012", + + // Scope errors + Self::UndefinedVariable => "E101", + Self::DuplicateVariable => "E102", + Self::DuplicateField => "E103", + Self::DuplicateParameter => "E104", + + // Format errors + Self::FormatInvalidSpecifier => "E201", + Self::FormatArgCountMismatch => "E202", + Self::FormatTypeMismatch => "E203", + + // Warnings + Self::UnusedVariable => "W001", + Self::UnusedParameter => "W002", + Self::ShadowedVariable => "W003", + Self::UnreachableCode => "W004", + } + } +} + +/// A diagnostic message with location and metadata. +#[derive(Debug, Clone)] +pub struct Diagnostic { + /// The error code. + pub code: ErrorCode, + /// Severity level. + pub severity: Severity, + /// Human-readable message. + pub message: String, + /// Source location (LSP range). + pub range: LspRange, + /// Optional suggestion for fixing the error. + pub suggestion: Option, + /// Related information (e.g., "defined here"). + pub related: Vec, +} + +/// Related diagnostic information. +#[derive(Debug, Clone)] +pub struct RelatedInfo { + /// Description of the relation. + pub message: String, + /// Location of the related item. + pub uri: Uri, + /// Range within the file. + pub range: LspRange, +} + +impl Diagnostic { + /// Create a new diagnostic with the given code and message. 
+ pub fn new(code: ErrorCode, message: impl Into, range: LspRange) -> Self { + Self { + severity: code.default_severity(), + code, + message: message.into(), + range, + suggestion: None, + related: Vec::new(), + } + } + + /// Add a suggestion to this diagnostic. + #[must_use] + pub fn with_suggestion(mut self, message: impl Into) -> Self { + self.suggestion = Some(message.into()); + self + } + + /// Add related information. + #[must_use] + pub fn with_related(mut self, message: impl Into, uri: Uri, range: LspRange) -> Self { + self.related.push(RelatedInfo { + message: message.into(), + uri, + range, + }); + self + } + + /// Convert to LSP Diagnostic. + #[must_use] + pub fn to_lsp(&self) -> lsp_types::Diagnostic { + let related_information = if self.related.is_empty() { + None + } else { + Some( + self.related + .iter() + .map(|r| DiagnosticRelatedInformation { + location: Location { + uri: r.uri.clone(), + range: r.range.into(), + }, + message: r.message.clone(), + }) + .collect(), + ) + }; + + let mut message = self.message.clone(); + if let Some(suggestion) = &self.suggestion { + message.push_str("\n\nSuggestion: "); + message.push_str(suggestion); + } + + lsp_types::Diagnostic { + range: self.range.into(), + severity: Some(self.severity.into()), + code: Some(lsp_types::NumberOrString::String( + self.code.as_str().to_string(), + )), + code_description: None, + source: Some("jrsonnet".to_string()), + message, + related_information, + tags: None, + data: None, + } + } +} + +/// Builder for collecting diagnostics during analysis. +#[derive(Debug, Default)] +pub struct DiagnosticCollector { + diagnostics: Vec, +} + +impl DiagnosticCollector { + /// Create a new empty collector. + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Push a diagnostic. + pub fn push(&mut self, diagnostic: Diagnostic) { + self.diagnostics.push(diagnostic); + } + + /// Add an error diagnostic. 
+ pub fn error(&mut self, code: ErrorCode, message: impl Into, range: LspRange) { + self.push(Diagnostic::new(code, message, range)); + } + + /// Add a warning diagnostic. + pub fn warning(&mut self, code: ErrorCode, message: impl Into, range: LspRange) { + let mut diag = Diagnostic::new(code, message, range); + diag.severity = Severity::Warning; + self.push(diag); + } + + /// Convert all diagnostics to LSP format. + #[must_use] + pub fn into_lsp_diagnostics(self) -> Vec { + self.diagnostics.into_iter().map(|d| d.to_lsp()).collect() + } + + /// Get the collected diagnostics. + #[must_use] + pub fn into_diagnostics(self) -> Vec { + self.diagnostics + } + + /// Check if there are any diagnostics. + #[must_use] + pub fn is_empty(&self) -> bool { + self.diagnostics.is_empty() + } + + /// Check if there are any errors. + #[must_use] + pub fn has_errors(&self) -> bool { + self.diagnostics + .iter() + .any(|d| d.severity == Severity::Error) + } + + /// Get the number of diagnostics. + #[must_use] + pub fn len(&self) -> usize { + self.diagnostics.len() + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{CharOffset, Line, LspPosition}; + + use super::*; + + fn test_range() -> LspRange { + LspRange { + start: LspPosition { + line: Line(0), + character: CharOffset(0), + }, + end: LspPosition { + line: Line(0), + character: CharOffset(5), + }, + } + } + + #[test] + fn test_error_codes() { + assert_eq!(ErrorCode::BinaryOpMismatch.as_str(), "E001"); + assert_eq!(ErrorCode::UnusedVariable.as_str(), "W001"); + assert_eq!( + ErrorCode::BinaryOpMismatch.default_severity(), + Severity::Error + ); + assert_eq!( + ErrorCode::UnusedVariable.default_severity(), + Severity::Warning + ); + } + + #[test] + fn test_diagnostic_creation() { + let diag = Diagnostic::new( + ErrorCode::NoSuchField, + "field 'foo' not found", + test_range(), + ); + assert_eq!(diag.code, ErrorCode::NoSuchField); + assert_eq!(diag.severity, Severity::Error); + assert!(diag.message.contains("foo")); + } 
+ + #[test] + fn test_diagnostic_with_suggestion() { + let diag = Diagnostic::new( + ErrorCode::NoSuchField, + "field 'foo' not found", + test_range(), + ) + .with_suggestion("did you mean 'food'?"); + assert!(diag.suggestion.is_some()); + + let lsp = diag.to_lsp(); + assert!(lsp.message.contains("Suggestion:")); + assert!(lsp.message.contains("food")); + } + + #[test] + fn test_collector() { + let mut collector = DiagnosticCollector::new(); + assert!(collector.is_empty()); + + collector.error(ErrorCode::NoSuchField, "error 1", test_range()); + collector.warning(ErrorCode::UnusedVariable, "warning 1", test_range()); + + assert_eq!(collector.len(), 2); + assert!(collector.has_errors()); + + let lsp_diagnostics = collector.into_lsp_diagnostics(); + assert_eq!(lsp_diagnostics.len(), 2); + } +} diff --git a/crates/jrsonnet-lsp-check/src/format_check.rs b/crates/jrsonnet-lsp-check/src/format_check.rs new file mode 100644 index 00000000..1c9d47ce --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/format_check.rs @@ -0,0 +1,740 @@ +//! Format string validation for std.format and the % operator. +//! +//! Jsonnet uses Python-style format strings: +//! - `%s` - string (any value) +//! - `%d`, `%i`, `%u` - integer (number) +//! - `%o`, `%x`, `%X` - integer in octal/hex (number) +//! - `%e`, `%E`, `%f`, `%F`, `%g`, `%G` - floating point (number) +//! - `%c` - character (number or single-char string) +//! - `%%` - literal % +//! - `%(name)s` - named argument from object +//! +//! Format modifiers: +//! - Flags: `-` (left-justify), `+` (show sign), ` ` (space for positive), `#` (alternate form), `0` (zero-pad) +//! - Width: minimum field width (digits or `*` for dynamic) +//! - Precision: `.` followed by digits or `*` (for floats: decimal places; for strings: max chars) + +use jrsonnet_lsp_types::{Ty, TyData, TypeStoreOps}; + +/// Expected type category for a format placeholder. +/// +/// This is a lightweight representation that avoids needing a `TyStore` during parsing. 
+/// Convert to `Ty` when needed for type checking. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum FormatTypeKind { + /// Any type is accepted (%s - toString is called) + Any, + /// Number type required (%d, %f, etc.) + Number, + /// Number or single-character string (%c) + NumberOrString, +} + +impl FormatTypeKind { + /// Convert to the corresponding Ty. + /// + /// For `NumberOrString`, creates a union type in the provided store. + pub fn to_ty(self, store: &mut S) -> Ty { + match self { + FormatTypeKind::Any => Ty::ANY, + FormatTypeKind::Number => Ty::NUMBER, + FormatTypeKind::NumberOrString => store.union(vec![Ty::NUMBER, Ty::STRING]), + } + } + + /// Check if the given Ty is compatible with this format type kind. + pub fn is_compatible_with(self, ty: Ty, store: &S) -> bool { + // Any is always compatible + if ty.is_any() { + return true; + } + + match self { + FormatTypeKind::Any => true, + FormatTypeKind::Number => { + ty == Ty::NUMBER || matches!(store.get_data(ty), TyData::BoundedNumber(_)) + } + FormatTypeKind::NumberOrString => { + ty == Ty::NUMBER + || ty == Ty::STRING + || ty == Ty::CHAR + || matches!( + store.get_data(ty), + TyData::BoundedNumber(_) | TyData::LiteralString(_) + ) + } + } + } +} + +/// Format flags that modify output. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum FormatFlag { + LeftJustify, + ShowSign, + SpaceSign, + Alternate, + ZeroPad, +} + +impl FormatFlag { + const fn bit(self) -> u8 { + match self { + Self::LeftJustify => 1 << 0, + Self::ShowSign => 1 << 1, + Self::SpaceSign => 1 << 2, + Self::Alternate => 1 << 3, + Self::ZeroPad => 1 << 4, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub struct FormatFlags { + bits: u8, +} + +impl FormatFlags { + pub fn insert(&mut self, flag: FormatFlag) { + self.bits |= flag.bit(); + } +} + +impl FromIterator for FormatFlags { + fn from_iter>(iter: T) -> Self { + let mut flags = Self::default(); + for flag in iter { + flags.insert(flag); + } + flags + } +} + +/// Width specification for format placeholder. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum WidthSpec { + /// No width specified. + #[default] + None, + /// Fixed width from format string. + Fixed(usize), + /// Dynamic width from next argument (`*`). + Dynamic, +} + +/// Precision specification for format placeholder. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum PrecisionSpec { + /// No precision specified. + #[default] + None, + /// Fixed precision from format string. + Fixed(usize), + /// Dynamic precision from next argument (`.*`). + Dynamic, +} + +/// Parsed format modifiers. +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub struct FormatModifiers { + /// Format flags. + pub flags: FormatFlags, + /// Width specification. + pub width: WidthSpec, + /// Precision specification. + pub precision: PrecisionSpec, +} + +impl FormatModifiers { + /// Count how many extra arguments are consumed by dynamic width/precision. + #[must_use] + pub fn dynamic_arg_count(&self) -> usize { + let width_args = usize::from(self.width == WidthSpec::Dynamic); + let prec_args = usize::from(self.precision == PrecisionSpec::Dynamic); + width_args + prec_args + } +} + +/// A placeholder in a format string. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub enum FormatPlaceholder { + /// Positional placeholder like `%s` or `%d`. + Positional { + /// Expected type category for this placeholder. + expected_type: FormatTypeKind, + /// The format specifier character (s, d, f, etc.). + specifier: char, + /// Parsed format modifiers. + modifiers: FormatModifiers, + }, + /// Named placeholder like `%(name)s`. + Named { + /// The name in the placeholder. + name: String, + /// Expected type category for this placeholder. + expected_type: FormatTypeKind, + /// The format specifier character. + specifier: char, + /// Parsed format modifiers. + modifiers: FormatModifiers, + }, +} + +/// Result of parsing a format string. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct FormatSpec { + /// List of placeholders in order of appearance. + pub placeholders: Vec, + /// Whether all placeholders are named (requires object argument). + pub uses_named: bool, + /// Whether any placeholders are positional (requires array argument). + pub uses_positional: bool, +} + +impl FormatSpec { + /// Count the total number of positional arguments required. + /// + /// This includes: + /// - One for each positional placeholder + /// - One for each dynamic width (`*`) + /// - One for each dynamic precision (`.*`) + #[must_use] + pub fn positional_arg_count(&self) -> usize { + self.placeholders + .iter() + .filter_map(|p| match p { + FormatPlaceholder::Positional { modifiers, .. } => { + Some(1 + modifiers.dynamic_arg_count()) + } + FormatPlaceholder::Named { .. } => None, + }) + .sum() + } + + /// Get all named field names required. + #[must_use] + pub fn named_fields(&self) -> Vec<&str> { + self.placeholders + .iter() + .filter_map(|p| match p { + FormatPlaceholder::Named { name, .. } => Some(name.as_str()), + FormatPlaceholder::Positional { .. } => None, + }) + .collect() + } +} + +/// Error encountered while parsing a format string. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub enum FormatParseError { + /// Incomplete format specifier at end of string. + IncompleteSpecifier, + /// Unknown format specifier character. + UnknownSpecifier(char), + /// Missing closing paren in named placeholder. + UnclosedNamedPlaceholder, + /// Empty name in named placeholder. + EmptyName, + /// Mixed positional and named placeholders. + MixedPositionalAndNamed, +} + +/// Parse a format string and extract placeholder information. +/// +/// # Errors +/// Returns `Err` when the format string contains an incomplete specifier, +/// malformed named placeholder, unknown specifier, or mixed named/positional placeholders. +pub fn parse_format_string(fmt: &str) -> Result { + let mut placeholders = Vec::new(); + let mut uses_named = false; + let mut uses_positional = false; + + let chars: Vec = fmt.chars().collect(); + let mut i = 0; + + while i < chars.len() { + let Some(current) = chars.get(i).copied() else { + break; + }; + if current != '%' { + i += 1; + continue; + } + + // Found a % + i += 1; + let Some(next) = chars.get(i).copied() else { + return Err(FormatParseError::IncompleteSpecifier); + }; + + // Check for %% + if next == '%' { + i += 1; + continue; + } + + // Check for named placeholder %(name) + if next == '(' { + i += 1; + let name_start = i; + + // Find closing paren + while matches!(chars.get(i), Some(ch) if *ch != ')') { + i += 1; + } + if !matches!(chars.get(i), Some(ch) if *ch == ')') { + return Err(FormatParseError::UnclosedNamedPlaceholder); + } + + let Some(name_chars) = chars.get(name_start..i) else { + return Err(FormatParseError::UnclosedNamedPlaceholder); + }; + let name: String = name_chars.iter().collect(); + if name.is_empty() { + return Err(FormatParseError::EmptyName); + } + + i += 1; // Skip ) + + // Parse flags, width, precision + let (modifiers, new_i) = parse_format_modifiers(&chars, i); + i = new_i; + + let Some(specifier) = chars.get(i).copied() else { + return 
Err(FormatParseError::IncompleteSpecifier); + }; + let expected_type = specifier_to_type_kind(specifier)?; + + placeholders.push(FormatPlaceholder::Named { + name, + expected_type, + specifier, + modifiers, + }); + uses_named = true; + i += 1; + continue; + } + + // Positional placeholder - parse flags, width, precision + let (modifiers, new_i) = parse_format_modifiers(&chars, i); + i = new_i; + + let Some(specifier) = chars.get(i).copied() else { + return Err(FormatParseError::IncompleteSpecifier); + }; + let expected_type = specifier_to_type_kind(specifier)?; + + placeholders.push(FormatPlaceholder::Positional { + expected_type, + specifier, + modifiers, + }); + uses_positional = true; + i += 1; + } + + // Check for mixed usage + if uses_named && uses_positional { + return Err(FormatParseError::MixedPositionalAndNamed); + } + + Ok(FormatSpec { + placeholders, + uses_named, + uses_positional, + }) +} + +/// Parse format modifiers (flags, width, precision) and return them with new index. 
+fn parse_format_modifiers(chars: &[char], mut i: usize) -> (FormatModifiers, usize) { + let mut modifiers = FormatModifiers::default(); + + // Parse flags: -, +, space, #, 0 + while let Some(ch) = chars.get(i).copied() { + match ch { + '-' => modifiers.flags.insert(FormatFlag::LeftJustify), + '+' => modifiers.flags.insert(FormatFlag::ShowSign), + ' ' => modifiers.flags.insert(FormatFlag::SpaceSign), + '#' => modifiers.flags.insert(FormatFlag::Alternate), + '0' => modifiers.flags.insert(FormatFlag::ZeroPad), + _ => break, + } + i += 1; + } + + // Parse width (digits or *) + if matches!(chars.get(i), Some(ch) if *ch == '*') { + modifiers.width = WidthSpec::Dynamic; + i += 1; + } else { + let width_start = i; + while matches!(chars.get(i), Some(ch) if ch.is_ascii_digit()) { + i += 1; + } + if i > width_start { + if let Some(width_chars) = chars.get(width_start..i) { + let width_str: String = width_chars.iter().collect(); + if let Ok(width) = width_str.parse::() { + modifiers.width = WidthSpec::Fixed(width); + } + } + } + } + + // Parse precision (.digits or .*) + if matches!(chars.get(i), Some(ch) if *ch == '.') { + i += 1; + if matches!(chars.get(i), Some(ch) if *ch == '*') { + modifiers.precision = PrecisionSpec::Dynamic; + i += 1; + } else { + let prec_start = i; + while matches!(chars.get(i), Some(ch) if ch.is_ascii_digit()) { + i += 1; + } + if i > prec_start { + if let Some(prec_chars) = chars.get(prec_start..i) { + let prec_str: String = prec_chars.iter().collect(); + if let Ok(prec) = prec_str.parse::() { + modifiers.precision = PrecisionSpec::Fixed(prec); + } + } + } else { + // Just "." with no digits means precision 0 + modifiers.precision = PrecisionSpec::Fixed(0); + } + } + } + + (modifiers, i) +} + +/// Convert a format specifier character to its expected type kind. 
+fn specifier_to_type_kind(specifier: char) -> Result { + match specifier { + 's' => Ok(FormatTypeKind::Any), // toString is called + 'd' | 'i' | 'u' | 'o' | 'x' | 'X' | 'e' | 'E' | 'f' | 'F' | 'g' | 'G' => { + Ok(FormatTypeKind::Number) + } + 'c' => Ok(FormatTypeKind::NumberOrString), + _ => Err(FormatParseError::UnknownSpecifier(specifier)), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn flags(values: &[FormatFlag]) -> FormatFlags { + values.iter().copied().collect() + } + + fn positional(specifier: char, expected_type: FormatTypeKind) -> FormatPlaceholder { + FormatPlaceholder::Positional { + expected_type, + specifier, + modifiers: FormatModifiers::default(), + } + } + + fn positional_with_mods( + specifier: char, + expected_type: FormatTypeKind, + modifiers: FormatModifiers, + ) -> FormatPlaceholder { + FormatPlaceholder::Positional { + expected_type, + specifier, + modifiers, + } + } + + fn named(name: &str, specifier: char, expected_type: FormatTypeKind) -> FormatPlaceholder { + FormatPlaceholder::Named { + name: name.to_string(), + expected_type, + specifier, + modifiers: FormatModifiers::default(), + } + } + + #[test] + fn test_simple_format_string() { + assert_eq!( + parse_format_string("Hello %s!"), + Ok(FormatSpec { + placeholders: vec![positional('s', FormatTypeKind::Any)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_multiple_placeholders() { + assert_eq!( + parse_format_string("%s has %d apples"), + Ok(FormatSpec { + placeholders: vec![ + positional('s', FormatTypeKind::Any), + positional('d', FormatTypeKind::Number), + ], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_escaped_percent() { + assert_eq!( + parse_format_string("100%% complete"), + Ok(FormatSpec { + placeholders: vec![], + uses_positional: false, + uses_named: false, + }) + ); + } + + #[test] + fn test_named_placeholder() { + assert_eq!( + parse_format_string("Hello %(name)s!"), + Ok(FormatSpec { + 
placeholders: vec![named("name", 's', FormatTypeKind::Any)], + uses_positional: false, + uses_named: true, + }) + ); + } + + #[test] + fn test_format_with_width_precision() { + let mods = FormatModifiers { + width: WidthSpec::Fixed(10), + precision: PrecisionSpec::Fixed(2), + ..Default::default() + }; + assert_eq!( + parse_format_string("%10.2f"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_format_with_flags() { + let mods = FormatModifiers { + flags: flags(&[FormatFlag::LeftJustify, FormatFlag::ShowSign]), + width: WidthSpec::Fixed(10), + ..Default::default() + }; + assert_eq!( + parse_format_string("%-+10d"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('d', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_format_with_all_flags() { + let mods = FormatModifiers { + flags: flags(&[ + FormatFlag::LeftJustify, + FormatFlag::ShowSign, + FormatFlag::SpaceSign, + FormatFlag::Alternate, + FormatFlag::ZeroPad, + ]), + ..Default::default() + }; + assert_eq!( + parse_format_string("%-+ #0d"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('d', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_dynamic_width() { + let mods = FormatModifiers { + width: WidthSpec::Dynamic, + ..Default::default() + }; + let spec = parse_format_string("%*s").unwrap(); + assert_eq!( + spec, + FormatSpec { + placeholders: vec![positional_with_mods('s', FormatTypeKind::Any, mods)], + uses_positional: true, + uses_named: false, + } + ); + // Dynamic width consumes one extra arg + assert_eq!(spec.positional_arg_count(), 2); + } + + #[test] + fn test_dynamic_precision() { + let mods = FormatModifiers { + precision: PrecisionSpec::Dynamic, + ..Default::default() + }; + let spec = parse_format_string("%.*f").unwrap(); 
+ assert_eq!( + spec, + FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + } + ); + // Dynamic precision consumes one extra arg + assert_eq!(spec.positional_arg_count(), 2); + } + + #[test] + fn test_dynamic_width_and_precision() { + let mods = FormatModifiers { + width: WidthSpec::Dynamic, + precision: PrecisionSpec::Dynamic, + ..Default::default() + }; + let spec = parse_format_string("%*.*f").unwrap(); + assert_eq!( + spec, + FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + } + ); + // Both dynamic width and precision consume args + assert_eq!(spec.positional_arg_count(), 3); + } + + #[test] + fn test_precision_only() { + let mods = FormatModifiers { + precision: PrecisionSpec::Fixed(5), + ..Default::default() + }; + assert_eq!( + parse_format_string("%.5f"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_precision_zero() { + let mods = FormatModifiers { + precision: PrecisionSpec::Fixed(0), + ..Default::default() + }; + assert_eq!( + parse_format_string("%.f"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_incomplete_specifier() { + assert_eq!( + parse_format_string("Hello %"), + Err(FormatParseError::IncompleteSpecifier) + ); + } + + #[test] + fn test_unknown_specifier() { + assert_eq!( + parse_format_string("%z"), + Err(FormatParseError::UnknownSpecifier('z')) + ); + } + + #[test] + fn test_unclosed_named() { + assert_eq!( + parse_format_string("%(name"), + Err(FormatParseError::UnclosedNamedPlaceholder) + ); + } + + #[test] + fn test_empty_name() { + assert_eq!( + parse_format_string("%()s"), + 
Err(FormatParseError::EmptyName) + ); + } + + #[test] + fn test_mixed_positional_and_named() { + assert_eq!( + parse_format_string("%s %(name)s"), + Err(FormatParseError::MixedPositionalAndNamed) + ); + } + + #[test] + fn test_all_numeric_specifiers() { + for spec_char in ['d', 'i', 'u', 'o', 'x', 'X', 'e', 'E', 'f', 'F', 'g', 'G'] { + let fmt = format!("%{spec_char}"); + assert_eq!( + parse_format_string(&fmt), + Ok(FormatSpec { + placeholders: vec![positional(spec_char, FormatTypeKind::Number)], + uses_positional: true, + uses_named: false, + }), + "specifier {spec_char}" + ); + } + } + + #[test] + fn test_complex_format_string() { + // Real-world example: "%-20s: %+10.2f%%" + let mods1 = FormatModifiers { + flags: flags(&[FormatFlag::LeftJustify]), + width: WidthSpec::Fixed(20), + ..Default::default() + }; + let mods2 = FormatModifiers { + flags: flags(&[FormatFlag::ShowSign]), + width: WidthSpec::Fixed(10), + precision: PrecisionSpec::Fixed(2), + }; + assert_eq!( + parse_format_string("%-20s: %+10.2f%%"), + Ok(FormatSpec { + placeholders: vec![ + positional_with_mods('s', FormatTypeKind::Any, mods1), + positional_with_mods('f', FormatTypeKind::Number, mods2), + ], + uses_positional: true, + uses_named: false, + }) + ); + } +} diff --git a/crates/jrsonnet-lsp-check/src/lib.rs b/crates/jrsonnet-lsp-check/src/lib.rs new file mode 100644 index 00000000..b37a1da5 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/lib.rs @@ -0,0 +1,17 @@ +//! Type checking and linting for Jsonnet LSP. +//! +//! Provides static analysis that runs after type inference: +//! - Type checking: detect type mismatches before evaluation +//! - Linting: detect unused variables, shadowing, unreachable code +//! - Format checking: validate format strings in std.format and % +//! 
- Unified diagnostics: consistent error codes and formatting + +pub mod diagnostic; +pub mod format_check; +pub mod lint; +pub mod type_check; + +pub use diagnostic::{Diagnostic, DiagnosticCollector, ErrorCode, RelatedInfo, Severity}; +pub use format_check::{parse_format_string, FormatParseError, FormatPlaceholder, FormatTypeKind}; +pub use lint::{lint, LintConfig}; +pub use type_check::{check_types, TypeCheckConfig, TypeError, TypeErrorKind}; diff --git a/crates/jrsonnet-lsp-check/src/lint/duplicates.rs b/crates/jrsonnet-lsp-check/src/lint/duplicates.rs new file mode 100644 index 00000000..f32f4849 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/lint/duplicates.rs @@ -0,0 +1,185 @@ +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, ExprFunction, ExprObject, FieldName, Member, ObjBody}, + AstNode, SyntaxKind, SyntaxNode, +}; +use lsp_types::{ + Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, +}; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use super::LintContext; + +pub(super) fn check_duplicate_fields( + node: &SyntaxNode, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + // Find all object expressions + for child in node.descendants() { + if child.kind() == SyntaxKind::EXPR_OBJECT { + if let Some(obj) = ExprObject::cast(child) { + check_object_for_duplicate_fields(&obj, ctx, diagnostics); + } + } + } +} + +/// Check a single object expression for duplicate fields. 
+fn check_object_for_duplicate_fields( + obj: &ExprObject, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + let mut seen: FxHashMap = FxHashMap::default(); + + let Some(obj_body) = obj.obj_body() else { + return; + }; + + let ObjBody::ObjBodyMemberList(members) = obj_body else { + // Object comprehension - can't have static duplicate fields + return; + }; + + for member in members.members() { + let field_name = match &member { + Member::MemberBindStmt(bind_stmt) => { + // { local x = value } - object-local binding + extract_bind_name(bind_stmt.obj_local().and_then(|ol| ol.bind())) + } + Member::MemberFieldNormal(field) => { + // { field: value } or { field:: value } + field.field_name().and_then(extract_static_field_name) + } + Member::MemberFieldMethod(method) => { + // { method(...): value } + method.field_name().and_then(extract_static_field_name) + } + Member::MemberAssertStmt(_) => None, // assert doesn't define a field + }; + + if let Some(name) = field_name { + let range = member.syntax().text_range(); + if let Some(first_range) = seen.get(&name) { + diagnostics.push(Diagnostic { + range: ctx.to_lsp_range(range), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("duplicate-field".to_string())), + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate field `{name}`"), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: ctx.uri.clone(), + range: ctx.to_lsp_range(*first_range), + }, + message: "first definition here".to_string(), + }]), + ..Default::default() + }); + } else { + seen.insert(name, range); + } + } + } +} + +/// Extract a name from an optional Bind node. 
+fn extract_bind_name(bind: Option) -> Option { + let bind = bind?; + match bind { + Bind::BindDestruct(bd) => { + // Use BindDestruct::into to get Option + // (note: calling bd.into() directly can be ambiguous with Into trait) + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bd)?; + if let Destruct::DestructFull(full) = destruct { + full.name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + } else { + None + } + } + Bind::BindFunction(bf) => bf + .name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()), + } +} + +/// Extract a static field name from a `FieldName` node. +fn extract_static_field_name(field_name: FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + // FieldNameFixed has id() for identifier and text() for string literals + fixed + .id() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + } + FieldName::FieldNameDynamic(_) => None, // Dynamic field names can't be statically checked + } +} + +/// Check for duplicate function parameters in the entire AST. +pub(super) fn check_duplicate_params( + node: &SyntaxNode, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + // Find all function expressions + for child in node.descendants() { + if child.kind() == SyntaxKind::EXPR_FUNCTION { + if let Some(func) = ExprFunction::cast(child) { + check_function_for_duplicate_params(&func, ctx, diagnostics); + } + } + } +} + +/// Check a single function expression for duplicate parameters. 
+fn check_function_for_duplicate_params( + func: &ExprFunction, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + let Some(params_desc) = func.params_desc() else { + return; + }; + + let mut seen: FxHashMap = FxHashMap::default(); + + for param in params_desc.params() { + // Extract parameter name from destruct + let param_name = param.destruct().and_then(|d| match d { + Destruct::DestructFull(full) => full + .name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()), + _ => None, // Array/object destructuring is more complex + }); + + if let Some(name) = param_name { + let range = param.syntax().text_range(); + if let Some(first_range) = seen.get(&name) { + diagnostics.push(Diagnostic { + range: ctx.to_lsp_range(range), + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("duplicate-param".to_string())), + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate parameter `{name}`"), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: ctx.uri.clone(), + range: ctx.to_lsp_range(*first_range), + }, + message: "first definition here".to_string(), + }]), + ..Default::default() + }); + } else { + seen.insert(name, range); + } + } + } +} diff --git a/crates/jrsonnet-lsp-check/src/lint/mod.rs b/crates/jrsonnet-lsp-check/src/lint/mod.rs new file mode 100644 index 00000000..23494983 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/lint/mod.rs @@ -0,0 +1,219 @@ +//! Lint diagnostics for Jsonnet code. +//! +//! Provides static analysis warnings that don't require evaluation: +//! - Unused variables +//! - Shadowed variables +//! 
- Unreachable code (via type inference) + +mod duplicates; +mod unreachable; +mod unused_shadow; + +use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; +use jrsonnet_lsp_inference::{TypeAnalysis, TypeEnv}; +use jrsonnet_rowan_parser::AstNode; +use lsp_types::{ + Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, Range, +}; +use rowan::TextRange; + +use self::{ + duplicates::{check_duplicate_fields, check_duplicate_params}, + unreachable::check_unreachable_code, + unused_shadow::{check_shadowed_variables, check_unused_variables}, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum LintRule { + UnusedVariables, + UnreachableCode, + ShadowedVariables, + TypeErrors, + DuplicateFields, + DuplicateParams, +} + +impl LintRule { + const fn bit(self) -> u8 { + match self { + Self::UnusedVariables => 1 << 0, + Self::UnreachableCode => 1 << 1, + Self::ShadowedVariables => 1 << 2, + Self::TypeErrors => 1 << 3, + Self::DuplicateFields => 1 << 4, + Self::DuplicateParams => 1 << 5, + } + } +} + +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +pub struct LintConfig { + enabled: u8, +} + +impl LintConfig { + /// Enable a single lint rule in this config. + #[must_use] + pub fn with_enabled(mut self, rule: LintRule) -> Self { + self.enable(rule); + self + } + + /// Enable a single lint rule in this config. + pub fn enable(&mut self, rule: LintRule) { + self.enabled |= rule.bit(); + } + + /// Check whether a lint rule is enabled. + #[must_use] + pub fn is_enabled(self, rule: LintRule) -> bool { + self.enabled & rule.bit() != 0 + } + + /// Create a config with all lints enabled. 
+ #[must_use] + pub fn all() -> Self { + Self { + enabled: LintRule::UnusedVariables.bit() + | LintRule::UnreachableCode.bit() + | LintRule::ShadowedVariables.bit() + | LintRule::TypeErrors.bit() + | LintRule::DuplicateFields.bit() + | LintRule::DuplicateParams.bit(), + } + } + + /// Create a config with all lints enabled except type-error checks. + #[must_use] + pub fn all_except_type_errors() -> Self { + Self { + enabled: LintRule::UnusedVariables.bit() + | LintRule::UnreachableCode.bit() + | LintRule::ShadowedVariables.bit() + | LintRule::DuplicateFields.bit() + | LintRule::DuplicateParams.bit(), + } + } +} + +/// Run lint checks on a document. +/// +/// The `uri` parameter is used to create related location information in diagnostics. +pub fn lint( + document: &Document, + analysis: &TypeAnalysis, + config: &LintConfig, + uri: &lsp_types::Uri, +) -> Vec { + let mut diagnostics = Vec::new(); + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + if let Some(expr) = ast.expr() { + let ctx = LintContext::new(text, line_index, uri); + + if config.is_enabled(LintRule::UnusedVariables) { + check_unused_variables(&expr, &ctx, &mut diagnostics); + } + + if config.is_enabled(LintRule::ShadowedVariables) { + check_shadowed_variables(expr.syntax(), &ctx, &mut diagnostics); + } + + if config.is_enabled(LintRule::UnreachableCode) { + let mut env = TypeEnv::new_default(); + check_unreachable_code(&expr, &ctx, &mut env, &mut diagnostics); + } + + if config.is_enabled(LintRule::TypeErrors) { + let type_check_config = super::type_check::TypeCheckConfig::all(); + let type_errors = + super::type_check::check_types(document, analysis, &type_check_config); + diagnostics.extend( + type_errors + .into_iter() + .map(|e| e.to_diagnostic(line_index, text, analysis)), + ); + } + + if config.is_enabled(LintRule::DuplicateFields) { + check_duplicate_fields(expr.syntax(), &ctx, &mut diagnostics); + } + + if 
config.is_enabled(LintRule::DuplicateParams) { + check_duplicate_params(expr.syntax(), &ctx, &mut diagnostics); + } + } + + diagnostics +} + +/// Context for lint checking. +struct LintContext<'a> { + text: &'a str, + line_index: &'a LineIndex, + uri: &'a lsp_types::Uri, +} + +impl<'a> LintContext<'a> { + fn new(text: &'a str, line_index: &'a LineIndex, uri: &'a lsp_types::Uri) -> Self { + Self { + text, + line_index, + uri, + } + } + + fn to_lsp_range(&self, range: TextRange) -> Range { + to_lsp_range(range, self.line_index, self.text) + } + + fn make_diagnostic( + &self, + range: TextRange, + message: String, + severity: DiagnosticSeverity, + code: &str, + ) -> Diagnostic { + Diagnostic { + range: self.to_lsp_range(range), + severity: Some(severity), + code: Some(NumberOrString::String(code.to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: None, + tags: None, + data: None, + } + } + + fn make_diagnostic_with_related( + &self, + range: TextRange, + message: String, + severity: DiagnosticSeverity, + code: &str, + related_range: TextRange, + related_message: String, + ) -> Diagnostic { + Diagnostic { + range: self.to_lsp_range(range), + severity: Some(severity), + code: Some(NumberOrString::String(code.to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: self.uri.clone(), + range: self.to_lsp_range(related_range), + }, + message: related_message, + }]), + tags: None, + data: None, + } + } +} diff --git a/crates/jrsonnet-lsp-check/src/lint/unreachable.rs b/crates/jrsonnet-lsp-check/src/lint/unreachable.rs new file mode 100644 index 00000000..ec775f54 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/lint/unreachable.rs @@ -0,0 +1,261 @@ +use jrsonnet_lsp_inference::{infer_expr_ty, TypeEnv}; +use jrsonnet_lsp_types::Ty; +use jrsonnet_rowan_parser::{ + 
nodes::{Bind, Destruct, Expr, ExprBase, Stmt},
+	AstNode,
+};
+use lsp_types::{Diagnostic, DiagnosticSeverity};
+use rowan::TextRange;
+
+use super::LintContext;
+
+/// If `bind` is a plain `local name = value` binding, return the bound name
+/// together with the `BindDestruct` node (for access to the bound value).
+/// Returns `None` for function bindings and array/object destructuring.
+fn extract_simple_bind_name(
+	bind: &Bind,
+) -> Option<(String, &jrsonnet_rowan_parser::nodes::BindDestruct)> {
+	let Bind::BindDestruct(bd) = bind else {
+		return None;
+	};
+	let destruct: Destruct = bd.into()?;
+	let Destruct::DestructFull(full) = destruct else {
+		return None;
+	};
+	let name = full.name()?.ident_lit()?.text().to_string();
+	Some((name, bd))
+}
+
+/// Check for unreachable code in an expression.
+///
+/// Unreachable code patterns detected:
+/// - Code after an `assert` whose condition is a divergent expression (type Never)
+/// - Code after an `assert false`
+pub(super) fn check_unreachable_code(
+	expr: &Expr,
+	ctx: &LintContext,
+	env: &mut TypeEnv,
+	diagnostics: &mut Vec<Diagnostic>,
+) {
+	// Check statements (locals and asserts) that precede the body
+	for stmt in expr.stmts() {
+		match &stmt {
+			Stmt::StmtAssert(assert_stmt) => {
+				// Check if the assert condition diverges
+				if let Some(assertion) = assert_stmt.assertion() {
+					if let Some(cond) = assertion.condition() {
+						let cond_ty = infer_expr_ty(&cond, env);
+						if cond_ty.is_never() {
+							// The assert condition itself diverges, so everything after is unreachable
+							// Find the range of everything after this assert
+							if let Some(body_range) = find_code_after_stmt(&stmt, expr) {
+								diagnostics.push(ctx.make_diagnostic_with_related(
+									body_range,
+									"unreachable code: assert condition always diverges"
+										.to_string(),
+									DiagnosticSeverity::WARNING,
+									"unreachable-code",
+									cond.syntax().text_range(),
+									"divergent expression here".to_string(),
+								));
+								return; // Don't report more unreachable code
+							}
+						}
+						// Also check if condition is statically `false`
+						if is_statically_false(&cond) {
+							if let Some(body_range) = find_code_after_stmt(&stmt, expr) {
+								diagnostics.push(ctx.make_diagnostic_with_related(
+									body_range,
+									"unreachable code: assert condition is always false"
+										.to_string(),
+									DiagnosticSeverity::WARNING,
+									"unreachable-code",
+									cond.syntax().text_range(),
+									"condition is `false`".to_string(),
+								));
+								return;
+							}
+						}
+					}
+				}
+			}
+			Stmt::StmtLocal(local_stmt) => {
+				// Process local bindings for the type environment so later
+				// asserts/bodies see their inferred types.
+				for bind in local_stmt.binds() {
+					if let Some((name, bd)) = extract_simple_bind_name(&bind) {
+						let ty = bd.value().map_or(Ty::ANY, |v| infer_expr_ty(&v, env));
+						env.define_ty(name, ty);
+					}
+				}
+			}
+		}
+	}
+
+	// Recursively check the body expression and nested expressions
+	if let Some(base) = expr.expr_base() {
+		check_unreachable_in_base(&base, ctx, env, diagnostics);
+	}
+}
+
+/// Check if an expression is statically `false` (the literal `false`).
+fn is_statically_false(expr: &Expr) -> bool {
+	if let Some(base) = expr.expr_base() {
+		if let ExprBase::ExprLiteral(lit) = base {
+			if let Some(literal) = lit.literal() {
+				return matches!(
+					literal.kind(),
+					jrsonnet_rowan_parser::nodes::LiteralKind::FalseKw
+				);
+			}
+		}
+	}
+	false
+}
+
+/// Find the range of code after a statement in an expression.
+/// Returns `None` when nothing follows the statement.
+fn find_code_after_stmt(stmt: &Stmt, expr: &Expr) -> Option<TextRange> {
+	let stmt_end = stmt.syntax().text_range().end();
+	let expr_end = expr.syntax().text_range().end();
+
+	// Check if there's anything after this statement
+	if stmt_end < expr_end {
+		// NOTE(review): the range starts exactly at the statement's end, so it
+		// may include leading whitespace/comments before the next token.
+		Some(TextRange::new(stmt_end, expr_end))
+	} else {
+		None
+	}
+}
+
+/// Check for unreachable code in a base expression.
+fn check_unreachable_in_base(
+	base: &ExprBase,
+	ctx: &LintContext,
+	env: &mut TypeEnv,
+	diagnostics: &mut Vec<Diagnostic>,
+) {
+	match base {
+		ExprBase::ExprBinary(binary) => {
+			// Check if left operand is divergent
+			if let Some(lhs) = binary.lhs() {
+				let lhs_ty = infer_expr_ty(&lhs, env);
+				if lhs_ty.is_never() {
+					// Right operand is unreachable
+					if let Some(rhs) = binary.rhs() {
+						diagnostics.push(ctx.make_diagnostic_with_related(
+							rhs.syntax().text_range(),
+							"unreachable code: left operand always diverges".to_string(),
+							DiagnosticSeverity::WARNING,
+							"unreachable-code",
+							lhs.syntax().text_range(),
+							"divergent expression here".to_string(),
+						));
+						return;
+					}
+				}
+				// Recursively check left operand
+				check_unreachable_code(&lhs, ctx, env, diagnostics);
+			}
+			// Recursively check right operand
+			if let Some(rhs) = binary.rhs() {
+				check_unreachable_code(&rhs, ctx, env, diagnostics);
+			}
+		}
+		ExprBase::ExprIfThenElse(if_expr) => {
+			// Check condition for divergence
+			if let Some(cond) = if_expr.cond() {
+				let cond_ty = infer_expr_ty(&cond, env);
+				if cond_ty.is_never() {
+					// Both branches are unreachable; report on the then-branch only.
+					if let Some(then_clause) = if_expr.then() {
+						if let Some(then_expr) = then_clause.expr() {
+							diagnostics.push(ctx.make_diagnostic_with_related(
+								then_expr.syntax().text_range(),
+								"unreachable code: condition always diverges".to_string(),
+								DiagnosticSeverity::WARNING,
+								"unreachable-code",
+								cond.syntax().text_range(),
+								"divergent expression here".to_string(),
+							));
+						}
+					}
+					return;
+				}
+				// Recursively check condition
+				check_unreachable_code(&cond, ctx, env, diagnostics);
+			}
+			// Recursively check branches
+			if let Some(then_clause) = if_expr.then() {
+				if let Some(then_expr) = then_clause.expr() {
+					check_unreachable_code(&then_expr, ctx, env, diagnostics);
+				}
+			}
+			if let Some(else_clause) = if_expr.else_() {
+				if let Some(else_expr) = else_clause.expr() {
+					check_unreachable_code(&else_expr, ctx, env, diagnostics);
+				}
+			}
+		}
+		ExprBase::ExprParened(parens) => {
+			if let Some(inner) = parens.expr() {
+				check_unreachable_code(&inner, ctx, env, diagnostics);
+			}
+		}
+		ExprBase::ExprFunction(func) => {
+			// Check function body in its own scope
+			if let Some(body) = func.expr() {
+				env.push_scope();
+				check_unreachable_code(&body, ctx, env, diagnostics);
+				env.pop_scope();
+			}
+		}
+		ExprBase::ExprArray(arr) => {
+			for elem in arr.exprs() {
+				check_unreachable_code(&elem, ctx, env, diagnostics);
+			}
+		}
+		ExprBase::ExprObject(obj) => {
+			if let Some(body) = obj.obj_body() {
+				check_unreachable_in_obj_body(&body, ctx, env, diagnostics);
+			}
+		}
+		_ => {}
+	}
+}
+
+/// Check for unreachable code in an object body (field values, method bodies
+/// and member assert conditions).
+fn check_unreachable_in_obj_body(
+	body: &jrsonnet_rowan_parser::nodes::ObjBody,
+	ctx: &LintContext,
+	env: &mut TypeEnv,
+	diagnostics: &mut Vec<Diagnostic>,
+) {
+	use jrsonnet_rowan_parser::nodes::{Member, ObjBody};
+	if let ObjBody::ObjBodyMemberList(members) = body {
+		for member in members.members() {
+			match member {
+				Member::MemberFieldNormal(field) => {
+					if let Some(expr) = field.expr() {
+						check_unreachable_code(&expr, ctx, env, diagnostics);
+					}
+				}
+				Member::MemberFieldMethod(method) => {
+					if let Some(expr) = method.expr() {
+						env.push_scope();
+						check_unreachable_code(&expr, ctx, env, diagnostics);
+						env.pop_scope();
+					}
+				}
+				Member::MemberAssertStmt(assert_member) => {
+					if let Some(assertion) = assert_member.assertion() {
+						if let Some(cond) = assertion.condition() {
+							check_unreachable_code(&cond, ctx, env, diagnostics);
+						}
+					}
+				}
+				Member::MemberBindStmt(_) => {}
+			}
+		}
+	}
+}
diff --git a/crates/jrsonnet-lsp-check/src/lint/unused_shadow.rs b/crates/jrsonnet-lsp-check/src/lint/unused_shadow.rs
new file mode 100644
index 00000000..d23eca1b
--- /dev/null
+++ b/crates/jrsonnet-lsp-check/src/lint/unused_shadow.rs
@@ -0,0 +1,915 @@
+use jrsonnet_lsp_scope::ident_resolves_to_builtin_std;
+use jrsonnet_rowan_parser::{
+	nodes::{Bind, Destruct, DestructArrayPart, Expr, ExprVar, ForSpec, Param,
StmtLocal},
+	AstNode, SyntaxKind, SyntaxNode,
+};
+use lsp_types::{Diagnostic, DiagnosticSeverity};
+use rowan::TextRange;
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use super::LintContext;
+
+/// A variable recorded in a lexical scope (used by the shadowing check).
+#[derive(Debug, Clone)]
+struct ScopeVar {
+	range: TextRange,
+}
+
+/// A variable definition with its location and name.
+#[derive(Debug, Clone)]
+struct VarDef {
+	name: String,
+	range: TextRange,
+	/// Whether the variable name starts with underscore (intentionally unused).
+	is_underscore_prefixed: bool,
+}
+
+pub(super) fn check_unused_variables(
+	expr: &Expr,
+	ctx: &LintContext,
+	diagnostics: &mut Vec<Diagnostic>,
+) {
+	let mut definitions: FxHashMap<String, Vec<VarDef>> = FxHashMap::default();
+	let mut references: FxHashSet<String> = FxHashSet::default();
+
+	// Collect all definitions and references
+	collect_definitions_and_references(expr.syntax(), &mut definitions, &mut references);
+
+	// Report unused variables.
+	// NOTE: reference tracking is by name, not by scope - any use of `x`
+	// anywhere in the document marks every definition of `x` as used.
+	for (name, defs) in definitions {
+		// Skip if any reference exists for this name
+		if references.contains(&name) {
+			continue;
+		}
+
+		for def in defs {
+			// Skip underscore-prefixed variables (intentionally unused)
+			if def.is_underscore_prefixed {
+				continue;
+			}
+
+			diagnostics.push(ctx.make_diagnostic(
+				def.range,
+				format!(
+					"unused variable: `{}`; prefix with `_` to silence this warning",
+					def.name
+				),
+				DiagnosticSeverity::WARNING,
+				"unused-variable",
+			));
+		}
+	}
+}
+
+/// Check for shadowed variables in nested scopes.
+pub(super) fn check_shadowed_variables( + node: &SyntaxNode, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + let mut scope_stack: Vec> = vec![FxHashMap::default()]; + check_shadowed_variables_inner(node, ctx, &mut scope_stack, diagnostics); +} + +fn check_shadowed_variables_inner( + node: &SyntaxNode, + ctx: &LintContext, + scope_stack: &mut Vec>, + diagnostics: &mut Vec, +) { + // Check if this node introduces a new scope + // - Functions and for specs create new scopes for their parameters + // - Nested EXPR nodes (sub-expressions) create new scopes for their locals + let introduces_scope = match node.kind() { + SyntaxKind::EXPR_FUNCTION | SyntaxKind::BIND_FUNCTION | SyntaxKind::FOR_SPEC => true, + SyntaxKind::EXPR => { + // Only create a new scope if this EXPR is not the root (has a parent EXPR) + // This handles cases like `local y = (local x = 2; x)` where the inner + // expression creates a new scope + node.parent() + .is_some_and(|p| p.kind() != SyntaxKind::SOURCE_FILE) + } + _ => false, + }; + + if introduces_scope { + scope_stack.push(FxHashMap::default()); + } + + // Process definitions at this node + match node.kind() { + SyntaxKind::STMT_LOCAL => { + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + check_bind_for_shadow(&bind, ctx, scope_stack, diagnostics); + } + } + } + SyntaxKind::EXPR_FUNCTION => { + if let Some(func) = jrsonnet_rowan_parser::nodes::ExprFunction::cast(node.clone()) { + if let Some(params) = func.params_desc() { + for param in params.params() { + check_param_for_shadow(¶m, ctx, scope_stack, diagnostics); + } + } + } + } + SyntaxKind::BIND_FUNCTION => { + if let Some(func) = jrsonnet_rowan_parser::nodes::BindFunction::cast(node.clone()) { + if let Some(params) = func.params() { + for param in params.params() { + check_param_for_shadow(¶m, ctx, scope_stack, diagnostics); + } + } + } + } + SyntaxKind::FOR_SPEC => { + if let Some(for_spec) = ForSpec::cast(node.clone()) { + if let 
Some(destruct) = for_spec.bind() { + check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics); + } + } + } + _ => {} + } + + // Recurse into children + for child in node.children() { + check_shadowed_variables_inner(&child, ctx, scope_stack, diagnostics); + } + + if introduces_scope { + scope_stack.pop(); + } +} + +/// Check if a name shadows a variable in an outer scope. +fn check_for_shadow( + name: &str, + range: TextRange, + ctx: &LintContext, + scope_stack: &mut Vec>, + diagnostics: &mut Vec, +) { + // Skip underscore-prefixed variables + if name.starts_with('_') { + if let Some(current_scope) = scope_stack.last_mut() { + current_scope.insert(name.to_string(), ScopeVar { range }); + } + return; + } + + // Check outer scopes for shadowing (skip the current scope) + let num_scopes = scope_stack.len(); + for scope in scope_stack.iter().take(num_scopes.saturating_sub(1)) { + if let Some(original) = scope.get(name) { + diagnostics.push(ctx.make_diagnostic_with_related( + range, + format!("variable `{name}` shadows a variable from an outer scope"), + DiagnosticSeverity::WARNING, + "shadowed-variable", + original.range, + format!("`{name}` originally defined here"), + )); + break; + } + } + + // Add to current scope + if let Some(current_scope) = scope_stack.last_mut() { + current_scope.insert(name.to_string(), ScopeVar { range }); + } +} + +/// Check a Bind node for shadowing. 
+fn check_bind_for_shadow(
+	bind: &Bind,
+	ctx: &LintContext,
+	scope_stack: &mut Vec<FxHashMap<String, ScopeVar>>,
+	diagnostics: &mut Vec<Diagnostic>,
+) {
+	match bind {
+		Bind::BindDestruct(bd) => {
+			if let Some(destruct) = bd.into() {
+				check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics);
+			}
+		}
+		Bind::BindFunction(bf) => {
+			if let Some(name_node) = bf.name() {
+				if let Some(ident) = name_node.ident_lit() {
+					let name = ident.text();
+					check_for_shadow(
+						name,
+						name_node.syntax().text_range(),
+						ctx,
+						scope_stack,
+						diagnostics,
+					);
+				}
+			}
+		}
+	}
+}
+
+/// Check a Destruct node for shadowing, recursing into array/object patterns.
+fn check_destruct_for_shadow(
+	destruct: &Destruct,
+	ctx: &LintContext,
+	scope_stack: &mut Vec<FxHashMap<String, ScopeVar>>,
+	diagnostics: &mut Vec<Diagnostic>,
+) {
+	match destruct {
+		Destruct::DestructFull(full) => {
+			if let Some(name_node) = full.name() {
+				if let Some(ident) = name_node.ident_lit() {
+					let name = ident.text();
+					check_for_shadow(
+						name,
+						name_node.syntax().text_range(),
+						ctx,
+						scope_stack,
+						diagnostics,
+					);
+				}
+			}
+		}
+		Destruct::DestructArray(arr) => {
+			for elem in arr.destruct_array_parts() {
+				if let DestructArrayPart::DestructArrayElement(array_elem) = elem {
+					if let Some(destruct) = array_elem.destruct() {
+						check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics);
+					}
+				}
+			}
+		}
+		Destruct::DestructObject(obj) => {
+			for field in obj.destruct_object_fields() {
+				if let Some(destruct) = field.destruct() {
+					check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics);
+				}
+			}
+		}
+		Destruct::DestructSkip(_) => {}
+	}
+}
+
+/// Check a Param node for shadowing.
+fn check_param_for_shadow(
+	param: &Param,
+	ctx: &LintContext,
+	scope_stack: &mut Vec<FxHashMap<String, ScopeVar>>,
+	diagnostics: &mut Vec<Diagnostic>,
+) {
+	if let Some(destruct) = param.destruct() {
+		check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics);
+	}
+}
+
+/// Walk the syntax tree, recording every variable definition (locals,
+/// function parameters, for-comprehension bindings) and every variable
+/// reference, keyed by name, for the unused-variable analysis.
+fn collect_definitions_and_references(
+	node: &SyntaxNode,
+	definitions: &mut FxHashMap<String, Vec<VarDef>>,
+	references: &mut FxHashSet<String>,
+) {
+	// Process this node
+	match node.kind() {
+		SyntaxKind::STMT_LOCAL => {
+			if let Some(stmt_local) = StmtLocal::cast(node.clone()) {
+				for bind in stmt_local.binds() {
+					collect_bind_definition(&bind, definitions);
+				}
+			}
+		}
+		SyntaxKind::EXPR_FUNCTION => {
+			// Function parameters
+			if let Some(func) = jrsonnet_rowan_parser::nodes::ExprFunction::cast(node.clone()) {
+				if let Some(params) = func.params_desc() {
+					for param in params.params() {
+						collect_param_definition(&param, definitions);
+					}
+				}
+			}
+		}
+		SyntaxKind::BIND_FUNCTION => {
+			// Function binding parameters
+			if let Some(func) = jrsonnet_rowan_parser::nodes::BindFunction::cast(node.clone()) {
+				if let Some(params) = func.params() {
+					for param in params.params() {
+						collect_param_definition(&param, definitions);
+					}
+				}
+			}
+		}
+		SyntaxKind::FOR_SPEC => {
+			// For comprehension variable
+			if let Some(for_spec) = ForSpec::cast(node.clone()) {
+				if let Some(destruct) = for_spec.bind() {
+					collect_destruct_definition(&destruct, definitions);
+				}
+			}
+		}
+		SyntaxKind::EXPR_VAR => {
+			if let Some(var) = ExprVar::cast(node.clone()) {
+				if let Some(name) = var.name().and_then(|n| n.ident_lit()) {
+					let text = name.text().to_string();
+					// Uses of the builtin `std` don't count as references to a
+					// user-defined binding of the same name.
+					if !ident_resolves_to_builtin_std(&name) {
+						references.insert(text);
+					}
+				}
+			}
+		}
+		_ => {}
+	}
+
+	// Recurse into children
+	for child in node.children() {
+		collect_definitions_and_references(&child, definitions, references);
+	}
+}
+
+/// Collect definition from a Bind node.
+fn collect_bind_definition(bind: &Bind, definitions: &mut FxHashMap<String, Vec<VarDef>>) {
+	match bind {
+		Bind::BindDestruct(bd) => {
+			if let Some(destruct) = bd.into() {
+				collect_destruct_definition(&destruct, definitions);
+			}
+		}
+		Bind::BindFunction(bf) => {
+			if let Some(name_node) = bf.name() {
+				if let Some(ident) = name_node.ident_lit() {
+					let name = ident.text().to_string();
+					let is_underscore_prefixed = name.starts_with('_');
+					definitions.entry(name.clone()).or_default().push(VarDef {
+						name,
+						range: name_node.syntax().text_range(),
+						is_underscore_prefixed,
+					});
+				}
+			}
+		}
+	}
+}
+
+/// Collect definition from a Destruct node, recursing into array/object
+/// destructuring patterns.
+fn collect_destruct_definition(
+	destruct: &Destruct,
+	definitions: &mut FxHashMap<String, Vec<VarDef>>,
+) {
+	match destruct {
+		Destruct::DestructFull(full) => {
+			if let Some(name_node) = full.name() {
+				if let Some(ident) = name_node.ident_lit() {
+					let name = ident.text().to_string();
+					let is_underscore_prefixed = name.starts_with('_');
+					definitions.entry(name.clone()).or_default().push(VarDef {
+						name,
+						range: name_node.syntax().text_range(),
+						is_underscore_prefixed,
+					});
+				}
+			}
+		}
+		Destruct::DestructArray(arr) => {
+			// Array destructuring: [a, b, c]
+			for elem in arr.destruct_array_parts() {
+				if let DestructArrayPart::DestructArrayElement(array_elem) = elem {
+					if let Some(destruct) = array_elem.destruct() {
+						collect_destruct_definition(&destruct, definitions);
+					}
+				}
+			}
+		}
+		Destruct::DestructObject(obj) => {
+			// Object destructuring: {a, b, c}
+			for field in obj.destruct_object_fields() {
+				if let Some(destruct) = field.destruct() {
+					collect_destruct_definition(&destruct, definitions);
+				}
+			}
+		}
+		Destruct::DestructSkip(_) => {
+			// Skip patterns don't define variables
+		}
+	}
+}
+
+/// Collect definition from a Param node.
+fn collect_param_definition(param: &Param, definitions: &mut FxHashMap>) { + if let Some(destruct) = param.destruct() { + collect_destruct_definition(&destruct, definitions); + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_inference::TypeAnalysis; + use lsp_types::{ + Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, + Position, Range, Uri, + }; + + use crate::lint::{lint, LintConfig, LintRule}; + + fn test_uri() -> Uri { + "file:///test.jsonnet".parse().unwrap() + } + + #[derive(Debug, Clone, Copy, PartialEq, Eq)] + struct Span { + start_line: u32, + start_char: u32, + end_line: u32, + end_char: u32, + } + + impl Span { + const fn new(start_line: u32, start_char: u32, end_line: u32, end_char: u32) -> Self { + Self { + start_line, + start_char, + end_line, + end_char, + } + } + + fn to_range(self) -> Range { + Range { + start: Position { + line: self.start_line, + character: self.start_char, + }, + end: Position { + line: self.end_line, + character: self.end_char, + }, + } + } + } + + const fn span(start_line: u32, start_char: u32, end_line: u32, end_char: u32) -> Span { + Span::new(start_line, start_char, end_line, end_char) + } + + fn make_unused_var_diagnostic( + start_line: u32, + start_char: u32, + end_line: u32, + end_char: u32, + name: &str, + ) -> Diagnostic { + Diagnostic { + range: Range { + start: Position { + line: start_line, + character: start_char, + }, + end: Position { + line: end_line, + character: end_char, + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("unused-variable".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: format!("unused variable: `{name}`; prefix with `_` to silence this warning"), + related_information: None, + tags: None, + data: None, + } + } + + #[test] + fn test_unused_variable_detected() { + let code = "local x = 1; local y = 2; y"; + 
let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unused_var_diagnostic(0, 6, 0, 7, "x")] + ); + } + + #[test] + fn test_used_variable_not_flagged() { + let code = "local x = 1; x + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_shadowed_std_reference_counts_for_unused_analysis() { + let code = "local std = 1; std"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_underscore_prefixed_variable_not_flagged() { + let code = "local _unused = 1; local used = 2; used"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + // _unused should not be flagged (intentionally unused) + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_unused_function_parameter() { + let code = "local f(x, y) = y; f(1, 2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unused_var_diagnostic(0, 8, 0, 9, "x")] + ); + } + + #[test] + fn test_for_comprehension_variable_used() { + let code = "[x * 2 for x in [1, 2, 3]]"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = 
LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_no_lints_when_disabled() { + let code = "local unused = 1; 42"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default(); // All disabled + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + fn make_shadowed_var_diagnostic(range: Span, name: &str, original_range: Span) -> Diagnostic { + Diagnostic { + range: range.to_range(), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("shadowed-variable".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: format!("variable `{name}` shadows a variable from an outer scope"), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: test_uri(), + range: original_range.to_range(), + }, + message: format!("`{name}` originally defined here"), + }]), + tags: None, + data: None, + } + } + + #[test] + fn test_shadowed_variable_in_function() { + // x in function parameter shadows outer x + let code = "local x = 1; local f(x) = x; f(2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_shadowed_var_diagnostic( + span(0, 21, 0, 22), + "x", + span(0, 6, 0, 7) + )] + ); + } + + #[test] + fn test_shadowed_variable_in_nested_local() { + // Inner x shadows outer x + let code = "local x = 1; local y = (local x = 2; x); x + y"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + 
vec![make_shadowed_var_diagnostic( + span(0, 30, 0, 31), + "x", + span(0, 6, 0, 7) + )] + ); + } + + #[test] + fn test_no_shadow_for_different_names() { + let code = "local x = 1; local f(y) = y; f(x)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_underscore_prefixed_shadow_not_flagged() { + // _x shadows outer x but underscore prefix silences the warning + let code = "local x = 1; local f(_x) = _x; f(2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_shadowed_variable_in_for_comprehension() { + // x in for comprehension shadows outer x + let code = "local x = 1; [x for x in [1, 2, 3]]"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_shadowed_var_diagnostic( + span(0, 20, 0, 21), + "x", + span(0, 6, 0, 7) + )] + ); + } + + fn make_unreachable_diagnostic( + range: Span, + message: &str, + related_range: Span, + related_message: &str, + ) -> Diagnostic { + Diagnostic { + range: range.to_range(), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("unreachable-code".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: message.to_string(), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: test_uri(), + range: related_range.to_range(), + }, + message: related_message.to_string(), + }]), + tags: None, + data: None, + } + } + + 
#[test] + fn test_unreachable_after_assert_false() { + // Code after `assert false` is unreachable + // "assert false; 42" - char 13 is space after semicolon + let code = "assert false; 42"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unreachable_diagnostic( + span(0, 13, 0, 16), + "unreachable code: assert condition is always false", + span(0, 7, 0, 12), + "condition is `false`" + )] + ); + } + + #[test] + fn test_unreachable_after_assert_error() { + // Code after `assert error "msg"` is unreachable because condition diverges + // "assert error "fail"; 42" - char 20 is space after semicolon + let code = r#"assert error "fail"; 42"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unreachable_diagnostic( + span(0, 20, 0, 23), + "unreachable code: assert condition always diverges", + span(0, 7, 0, 19), + "divergent expression here" + )] + ); + } + + #[test] + fn test_unreachable_rhs_of_binary_with_parens() { + // `(error "fail") + 1` - right operand is unreachable because left diverges + // Note: without parens, `error "fail" + 1` is parsed as `error ("fail" + 1)` + let code = r#"(error "fail") + 1"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unreachable_diagnostic( + span(0, 17, 0, 18), + "unreachable code: left operand always diverges", + span(0, 0, 0, 14), + "divergent expression here" + )] + ); + } + + #[test] + fn test_unreachable_branches_when_condition_diverges() { + // Both branches are 
unreachable if condition is error + let code = r#"if error "fail" then 1 else 2"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unreachable_diagnostic( + span(0, 21, 0, 22), + "unreachable code: condition always diverges", + span(0, 3, 0, 15), + "divergent expression here" + )] + ); + } + + #[test] + fn test_no_unreachable_with_valid_code() { + // Normal code should not trigger unreachable warnings + let code = "assert true; 42"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_no_unreachable_when_disabled() { + // Unreachable code warnings should not be reported when disabled + let code = "assert false; 42"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default(); // All disabled + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + fn make_duplicate_field_diagnostic(range: Span, name: &str, first_range: Span) -> Diagnostic { + Diagnostic { + range: range.to_range(), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("duplicate-field".to_string())), + code_description: None, + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate field `{name}`"), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: test_uri(), + range: first_range.to_range(), + }, + message: "first definition here".to_string(), + }]), + tags: None, + data: None, + } + } + + fn make_duplicate_param_diagnostic(range: Span, name: &str, first_range: Span) -> Diagnostic { + Diagnostic { + range: range.to_range(), + 
severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("duplicate-param".to_string())), + code_description: None, + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate parameter `{name}`"), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: test_uri(), + range: first_range.to_range(), + }, + message: "first definition here".to_string(), + }]), + tags: None, + data: None, + } + } + + #[test] + fn test_duplicate_field_detected() { + // { a: 1, b: 2, a: 3 } + // First 'a' is at chars 2-6 (a: 1), second 'a' is at chars 14-18 (a: 3) + let code = "{ a: 1, b: 2, a: 3 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::DuplicateFields); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_duplicate_field_diagnostic( + span(0, 14, 0, 18), + "a", + span(0, 2, 0, 6) + )] + ); + } + + #[test] + fn test_no_duplicate_field_for_different_names() { + let code = "{ a: 1, b: 2, c: 3 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::DuplicateFields); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_duplicate_param_detected() { + // function(a, b, a) a + b + // First 'a' is at char 9, second 'a' is at char 15 + let code = "function(a, b, a) a + b"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::DuplicateParams); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_duplicate_param_diagnostic( + span(0, 15, 0, 16), + "a", + span(0, 9, 0, 10) + )] + ); + } + + #[test] + fn test_no_duplicate_param_for_different_names() { + let code = "function(a, b, c) a + b + c"; + let doc = Document::new(code.to_string(), 
DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::DuplicateParams); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_duplicate_field_disabled() { + let code = "{ a: 1, a: 2 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default(); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_duplicate_param_disabled() { + let code = "function(a, a) a"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default(); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } +} diff --git a/crates/jrsonnet-lsp-check/src/type_check/calls.rs b/crates/jrsonnet-lsp-check/src/type_check/calls.rs new file mode 100644 index 00000000..65044818 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/type_check/calls.rs @@ -0,0 +1,615 @@ +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, ident_resolves_to_builtin_std}; +use jrsonnet_lsp_stdlib::get_stdlib_signature; +use jrsonnet_lsp_types::{FunctionData, TyData}; +use jrsonnet_rowan_parser::{ + nodes::{Expr, ExprBase, ExprCall}, + AstNode, +}; +use rowan::TextRange; + +use super::{TypeError, TypeErrorKind}; +use crate::format_check::{parse_format_string, FormatPlaceholder, FormatTypeKind}; + +/// Validate a function call using `FunctionData` (Ty-native version). 
+pub(super) fn validate_function_call_ty( + func_data: &FunctionData, + function_name: String, + arg_count: usize, + range: TextRange, +) -> Option { + // Count required parameters (those without defaults) + let required = func_data.params.iter().filter(|p| !p.has_default).count(); + let total = func_data.params.len(); + + if arg_count < required { + Some(TypeError { + kind: TypeErrorKind::TooFewArguments { + function_name, + required, + provided: arg_count, + }, + range, + }) + } else if arg_count > total && !func_data.variadic { + Some(TypeError { + kind: TypeErrorKind::TooManyArguments { + function_name, + max_allowed: total, + provided: arg_count, + }, + range, + }) + } else { + None + } +} + +fn collect_unknown_named_argument_errors( + call: &ExprCall, + func_data: &FunctionData, + function_name: &str, +) -> Vec { + let expected: Vec = func_data + .params + .iter() + .map(|param| param.name.clone()) + .collect(); + let mut errors = Vec::new(); + let Some(args_desc) = call.args_desc() else { + return errors; + }; + + for arg in args_desc.args() { + let Some(arg_name) = arg + .name() + .and_then(|name| name.ident_lit()) + .map(|token| token.text().to_string()) + else { + continue; + }; + + if expected.iter().any(|candidate| candidate == &arg_name) { + continue; + } + + errors.push(TypeError { + kind: TypeErrorKind::UnknownNamedArgument { + function_name: function_name.to_string(), + arg_name, + expected: expected.clone(), + }, + range: arg.syntax().text_range(), + }); + } + + errors +} + +/// Check if an `ExprCall` is a stdlib function call and validate argument count and types. +/// +/// Matches the pattern: `std.functionName(args...)`, including aliases that +/// resolve to the builtin std object. 
+pub(super) fn check_stdlib_call_expr( + call: &ExprCall, + analysis: &TypeAnalysis, + errors: &mut Vec, +) { + // Get the callee - should be std.functionName (ExprField) + let Some(callee_expr) = call.callee() else { + return; + }; + let Some(ExprBase::ExprField(field)) = callee_expr.expr_base() else { + return; + }; + + // Check if base resolves to builtin std. + let Some(base_expr) = field.base() else { + return; + }; + if !expr_resolves_to_builtin_std(&base_expr) { + return; + } + + // Extract function name from the field + let Some(fn_name) = field + .field() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + else { + return; + }; + + // Look up signature + let Some(sig) = get_stdlib_signature(&fn_name) else { + return; + }; + + // Count arguments + let arg_count = call.args_desc().map_or(0, |args| args.args().count()); + + // Validate using the unified function + let qualified_name = format!("std.{fn_name}"); + let Some(func_data) = sig.func_data() else { + return; + }; + let unknown_named_argument_errors = + collect_unknown_named_argument_errors(call, &func_data, &qualified_name); + if !unknown_named_argument_errors.is_empty() { + errors.extend(unknown_named_argument_errors); + return; + } + if let Some(error) = validate_function_call_ty( + &func_data, + qualified_name.clone(), + arg_count, + call.syntax().text_range(), + ) { + errors.push(error); + } + + // Check argument types + if let Some(args_desc) = call.args_desc() { + for (i, arg) in args_desc.args().enumerate() { + if i >= func_data.params.len() { + break; // Variadic or too many args - handled elsewhere + } + + let Some(param) = func_data.params.get(i) else { + break; + }; + let stdlib_expected_ty = param.ty; + + // Skip if expected type is Any (no constraint) + if stdlib_expected_ty.is_any() { + continue; + } + + // Get the argument's inferred type + let Some(arg_expr) = arg.expr() else { + continue; + }; + let Some(actual_ty) = 
analysis.type_for_range(arg_expr.syntax().text_range()) else { + continue; + }; + + // Skip if actual type is Any or Never + if actual_ty.is_any() || actual_ty.is_never() { + continue; + } + + // Import expected type from stdlib store into analysis store + let expected_ty = analysis.import_from_stdlib(stdlib_expected_ty); + + // Check if actual type is subtype of expected type + // Special case: if expected is function_any() (no params), accept any function + // This handles higher-order functions like std.map where we accept any callable + let is_function_wildcard = { + let stdlib_store = jrsonnet_lsp_stdlib::stdlib_store(); + match *stdlib_store.get(stdlib_expected_ty) { + TyData::Function(ref f) => f.params.is_empty(), + _ => false, + } + }; + let type_matches = if is_function_wildcard { + analysis.is_function(actual_ty) + } else { + analysis.is_subtype(actual_ty, expected_ty) + }; + + if !type_matches { + errors.push(TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { + function_name: qualified_name.clone(), + param_name: param.name.clone(), + param_index: i, + expected: expected_ty, + actual: actual_ty, + }, + range: arg_expr.syntax().text_range(), + }); + } + } + } + + // Special validation for std.format + if fn_name == "format" { + check_format_call(call, analysis, errors); + } + + // Higher-order function validation + check_higher_order_call(&fn_name, call, analysis, errors); +} + +/// Configuration for higher-order function validation. +struct HigherOrderConfig { + /// Name of the callback parameter. + callback_param_name: &'static str, + /// Index of the callback argument (0-based). + callback_arg_index: usize, + /// Index of the array argument (0-based). + array_arg_index: usize, +} + +/// Validate higher-order function calls. +/// +/// Checks that callback function parameters are compatible with array element types. +/// For example, in `std.map(func, arr)`, we verify that `func` can accept elements of `arr`. 
+fn check_higher_order_call( + fn_name: &str, + call: &ExprCall, + analysis: &TypeAnalysis, + errors: &mut Vec, +) { + // Configuration for higher-order functions + // (callback_param_name, callback_arg_index, array_arg_index) + let config: Option = match fn_name { + "map" | "filter" | "flatMap" => Some(HigherOrderConfig { + callback_param_name: "func", + callback_arg_index: 0, + array_arg_index: 1, + }), + "find" | "findIndex" => Some(HigherOrderConfig { + callback_param_name: "func", + callback_arg_index: 1, + array_arg_index: 0, + }), + "sort" | "uniq" => Some(HigherOrderConfig { + callback_param_name: "keyF", + callback_arg_index: 1, + array_arg_index: 0, + }), + // foldl/foldr are more complex (accumulator + element), handle separately if needed + _ => None, + }; + + let Some(config) = config else { + return; + }; + + let Some(args_desc) = call.args_desc() else { + return; + }; + let args: Vec<_> = args_desc.args().collect(); + + // Get the callback and array arguments + let Some(callback_arg) = args.get(config.callback_arg_index) else { + return; + }; + let Some(array_arg) = args.get(config.array_arg_index) else { + return; + }; + + // Get the callback's type + let Some(callback_expr) = callback_arg.expr() else { + return; + }; + let Some(callback_ty) = analysis.type_for_range(callback_expr.syntax().text_range()) else { + return; + }; + + // Get the array's type + let Some(array_expr) = array_arg.expr() else { + return; + }; + let Some(array_ty) = analysis.type_for_range(array_expr.syntax().text_range()) else { + return; + }; + + // Skip if types are Any or Never + if array_ty.is_any() || array_ty.is_never() || callback_ty.is_any() || callback_ty.is_never() { + return; + } + + // Extract element type from array + // First get the data, then create union outside the borrow + let element_info = analysis.with_data(array_ty, |data| match data { + TyData::Array { elem, .. 
} => Some(Ok(*elem)), + TyData::Tuple { elems } => { + if elems.is_empty() { + None + } else { + Some(Err(elems.clone())) // Need to create union outside borrow + } + } + _ => None, + }); + let element_ty = match element_info { + Some(Ok(ty)) => ty, + Some(Err(elems)) => analysis.union(elems), + None => return, + }; + + // Skip if element type is Any + if element_ty.is_any() { + return; + } + + // Extract the callback's first parameter type + let callback_param_ty = analysis.with_data(callback_ty, |data| match data { + TyData::Function(ft) => ft.params.first().map(|param| param.ty), + _ => None, + }); + let Some(callback_param_ty) = callback_param_ty else { + return; + }; + + // Skip if callback param type is Any + if callback_param_ty.is_any() { + return; + } + + // Check if element type is compatible with callback param type + if !analysis.is_subtype(element_ty, callback_param_ty) { + errors.push(TypeError { + kind: TypeErrorKind::CallbackTypeMismatch { + function_name: format!("std.{fn_name}"), + callback_param: config.callback_param_name.to_string(), + element_type: element_ty, + callback_param_type: callback_param_ty, + }, + range: callback_expr.syntax().text_range(), + }); + } +} + +/// Validate a `std.format()` call. 
+/// +/// Checks: +/// - Format string is valid +/// - Argument count matches placeholders +/// - Argument types match expected types (when inferrable) +fn check_format_call(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec) { + let Some(args_desc) = call.args_desc() else { + return; + }; + let args: Vec<_> = args_desc.args().collect(); + if args.is_empty() { + return; + } + + // Get the format string from the first argument + let Some(fmt_arg) = args.first() else { + return; + }; + let Some(fmt_expr) = fmt_arg.expr() else { + return; + }; + + // Try to extract a literal string value + let Some(fmt_string) = get_string_literal(&fmt_expr) else { + return; // Can't validate non-literal format strings + }; + + // Parse the format string + let format_spec = match parse_format_string(&fmt_string) { + Ok(spec) => spec, + Err(parse_error) => { + errors.push(TypeError { + kind: TypeErrorKind::FormatStringError { parse_error }, + range: fmt_expr.syntax().text_range(), + }); + return; + } + }; + + // For positional placeholders, check argument count + // This includes extra args consumed by dynamic width (*) and precision (.*) + if format_spec.uses_positional { + let positional_count = format_spec.positional_arg_count(); + let provided = args.len() - 1; // Exclude format string itself + + if provided != positional_count { + errors.push(TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: positional_count, + provided, + }, + range: call.syntax().text_range(), + }); + return; // Skip type checking if count is wrong + } + + // Check argument types (skip format string, check remaining args) + for (i, placeholder) in format_spec.placeholders.iter().enumerate() { + if let FormatPlaceholder::Positional { + expected_type, + specifier, + .. 
+ } = placeholder + { + // Get the corresponding argument (offset by 1 for format string) + if let Some(arg) = args.get(i + 1) { + if let Some(arg_expr) = arg.expr() { + let Some(actual_ty) = + analysis.type_for_range(arg_expr.syntax().text_range()) + else { + continue; + }; + + // Skip Any types (unknown) + if actual_ty.is_any() || *expected_type == FormatTypeKind::Any { + continue; + } + + // Check type compatibility using FormatTypeKind method + let is_compatible = analysis + .with_store(|store| expected_type.is_compatible_with(actual_ty, store)); + if !is_compatible { + let expected_ty = + analysis.with_store_mut(|store| expected_type.to_ty(store)); + errors.push(TypeError { + kind: TypeErrorKind::FormatArgTypeMismatch { + index: i, + expected: expected_ty, + actual: actual_ty, + specifier: *specifier, + }, + range: arg_expr.syntax().text_range(), + }); + } + } + } + } + } + } +} + +/// Extract a string literal value from an expression. +fn get_string_literal(expr: &Expr) -> Option { + let base = expr.expr_base()?; + match base { + ExprBase::ExprString(s) => { + let text = s.syntax().text().to_string(); + if text.starts_with("|||") { + return parse_text_block_literal(&text); + } + + if let Some(inner) = text + .strip_prefix("@\"") + .and_then(|value| value.strip_suffix('"')) + .or_else(|| { + text.strip_prefix("@'") + .and_then(|value| value.strip_suffix('\'')) + }) { + return Some(inner.to_string()); + } + + text.strip_prefix('"') + .and_then(|value| value.strip_suffix('"')) + .or_else(|| { + text.strip_prefix('\'') + .and_then(|value| value.strip_suffix('\'')) + }) + .map(unescape_string) + } + _ => None, + } +} + +/// Parse Jsonnet text block syntax (`||| ... |||`) into its string content. 
+fn parse_text_block_literal(text: &str) -> Option { + let after_open = text.strip_prefix("|||")?; + let (_, body_with_terminator) = after_open.split_once('\n')?; + + let mut raw_lines = Vec::new(); + let mut found_terminator = false; + for line in body_with_terminator.split('\n') { + if is_text_block_terminator(line) { + found_terminator = true; + break; + } + raw_lines.push(line); + } + if !found_terminator { + return None; + } + + let indent = raw_lines + .iter() + .find(|line| !line.is_empty()) + .map(|line| { + line.chars() + .take_while(|ch| *ch == ' ' || *ch == '\t') + .collect::() + }) + .unwrap_or_default(); + + let normalized = raw_lines + .into_iter() + .map(|line| { + if indent.is_empty() || line.is_empty() { + line.to_string() + } else { + line.strip_prefix(&indent).unwrap_or(line).to_string() + } + }) + .collect::>(); + + Some(normalized.join("\n")) +} + +fn is_text_block_terminator(line: &str) -> bool { + line.trim_start_matches([' ', '\t']) == "|||" +} + +/// Unescape a string literal (simplified version). +fn unescape_string(s: &str) -> String { + let mut result = String::with_capacity(s.len()); + let mut chars = s.chars().peekable(); + + while let Some(c) = chars.next() { + if c == '\\' { + match chars.next() { + Some('n') => result.push('\n'), + Some('t') => result.push('\t'), + Some('r') => result.push('\r'), + Some('\\') | None => result.push('\\'), + Some('"') => result.push('"'), + Some('\'') => result.push('\''), + Some(other) => { + result.push('\\'); + result.push(other); + } + } + } else { + result.push(c); + } + } + + result +} + +/// Check if an `ExprCall` is a user function call and validate argument count. +/// +/// Matches the pattern: `varName(args...)` where varName is a known function. 
+pub(super) fn check_user_function_call_expr( + call: &ExprCall, + analysis: &TypeAnalysis, + errors: &mut Vec, +) { + // Get the callee - should be a variable (ExprVar) + let Some(callee_expr) = call.callee() else { + return; + }; + let Some(ExprBase::ExprVar(var)) = callee_expr.expr_base() else { + return; + }; + + let Some(var_ident) = var.name().and_then(|n| n.ident_lit()) else { + return; + }; + let var_name = var_ident.text().to_string(); + + // Skip builtin std - handled by check_stdlib_call_expr. + if ident_resolves_to_builtin_std(&var_ident) { + return; + } + + // Look up the type of the variable + let Some(var_ty) = analysis.type_for_range(var.syntax().text_range()) else { + return; + }; + + // Get function data using Ty-native method + let Some(func_data) = analysis.get_function(var_ty) else { + return; + }; + + // Count arguments + let arg_count = call.args_desc().map_or(0, |args| args.args().count()); + + // Validate using the Ty-native function + let unknown_named_argument_errors = + collect_unknown_named_argument_errors(call, &func_data, &var_name); + if !unknown_named_argument_errors.is_empty() { + errors.extend(unknown_named_argument_errors); + return; + } + + if let Some(error) = + validate_function_call_ty(&func_data, var_name, arg_count, call.syntax().text_range()) + { + errors.push(error); + } +} diff --git a/crates/jrsonnet-lsp-check/src/type_check/core.rs b/crates/jrsonnet-lsp-check/src/type_check/core.rs new file mode 100644 index 00000000..21769c0c --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/type_check/core.rs @@ -0,0 +1,1592 @@ +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_inference::{find_best_match, TypeAnalysis, TypeEnv}; +use jrsonnet_lsp_types::{binary_op_result_ty, unary_op_result_ty, Ty}; +use jrsonnet_rowan_parser::{ + nodes::{ + BinaryOperatorKind, Expr, ExprArray, ExprArrayComp, ExprBase, ExprBinary, ExprCall, + ExprField, ExprFunction, ExprIfThenElse, ExprIndex, ExprObjExtend, ExprObject, ExprParened, + 
ExprSlice, ExprUnary, Member, ObjBody, UnaryOperatorKind, + }, + AstNode, +}; + +use super::{ + calls::{check_stdlib_call_expr, check_user_function_call_expr}, + TypeCheckConfig, TypeCheckRule, TypeError, TypeErrorKind, +}; + +/// Check types in a document and return any type errors. +pub fn check_types( + document: &Document, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, +) -> Vec { + let mut errors = Vec::new(); + let ast = document.ast(); + let mut env = TypeEnv::new_default(); + + if let Some(expr) = ast.expr() { + check_expr(&expr, analysis, config, &mut env, &mut errors); + } + + errors +} + +/// Check an expression for type errors. +fn check_expr( + expr: &jrsonnet_rowan_parser::nodes::Expr, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + // Process local bindings + for stmt in expr.stmts() { + if let Some(stmt_local) = + jrsonnet_rowan_parser::nodes::StmtLocal::cast(stmt.syntax().clone()) + { + for bind in stmt_local.binds() { + check_bind(&bind, analysis, config, env, errors); + } + } + } + + // Check base expression (this includes ExprCall, ExprField, ExprIndex, etc.) + if let Some(base) = expr.expr_base() { + check_base(&base, analysis, config, env, errors); + } +} + +/// Check a binding for type errors. +fn check_bind( + bind: &jrsonnet_rowan_parser::nodes::Bind, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + use jrsonnet_rowan_parser::nodes::Bind; + match bind { + Bind::BindDestruct(bd) => { + if let Some(value) = bd.value() { + check_expr(&value, analysis, config, env, errors); + } + } + Bind::BindFunction(bf) => { + if let Some(body) = bf.value() { + env.push_scope(); + check_expr(&body, analysis, config, env, errors); + env.pop_scope(); + } + } + } +} + +/// Check a base expression for type errors. 
+fn check_base( + base: &ExprBase, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + match base { + ExprBase::ExprBinary(binary) => { + check_binary_base(base, binary, analysis, config, env, errors); + } + ExprBase::ExprUnary(unary) => check_unary_base(base, unary, analysis, config, env, errors), + ExprBase::ExprIfThenElse(if_expr) => { + check_if_then_else_base(if_expr, analysis, config, env, errors); + } + ExprBase::ExprParened(parens) => check_parened_base(parens, analysis, config, env, errors), + ExprBase::ExprFunction(func) => check_function_base(func, analysis, config, env, errors), + ExprBase::ExprArray(arr) => check_array_base(arr, analysis, config, env, errors), + ExprBase::ExprObject(obj) => check_object_base(obj, analysis, config, env, errors), + ExprBase::ExprObjExtend(extend) => { + check_obj_extend_base(extend, analysis, config, env, errors); + } + ExprBase::ExprArrayComp(comp) => { + check_array_comp_base(comp, analysis, config, env, errors); + } + ExprBase::ExprField(field) => check_field_base(base, field, analysis, config, env, errors), + ExprBase::ExprIndex(idx) => check_index_base(base, idx, analysis, config, env, errors), + ExprBase::ExprSlice(slice) => check_slice_base(base, slice, analysis, config, env, errors), + ExprBase::ExprCall(call) => check_call_base(base, call, analysis, config, env, errors), + _ => {} + } +} + +fn check_binary_base( + base: &ExprBase, + binary: &ExprBinary, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(lhs) = binary.lhs() { + check_expr(&lhs, analysis, config, env, errors); + } + if let Some(rhs) = binary.rhs() { + check_expr(&rhs, analysis, config, env, errors); + } + if !config.is_enabled(TypeCheckRule::BinaryOps) { + return; + } + let (Some(lhs), Some(rhs), Some(op)) = (binary.lhs(), binary.rhs(), binary.binary_operator()) + else { + return; + }; + let lhs_ty = analysis + 
.type_for_range(lhs.syntax().text_range()) + .unwrap_or(Ty::ANY); + let rhs_ty = analysis + .type_for_range(rhs.syntax().text_range()) + .unwrap_or(Ty::ANY); + if lhs_ty.is_any() || rhs_ty.is_any() || lhs_ty.is_never() || rhs_ty.is_never() { + return; + } + if let Err(reason) = + analysis.with_store_mut(|store| binary_op_result_ty(op.kind(), lhs_ty, rhs_ty, store)) + { + errors.push(TypeError { + kind: TypeErrorKind::BinaryOpMismatch { + lhs: lhs_ty, + rhs: rhs_ty, + op: binary_op_str(op.kind()), + reason, + }, + range: base.syntax().text_range(), + }); + } +} + +fn check_unary_base( + base: &ExprBase, + unary: &ExprUnary, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + let Some(rhs) = unary.rhs() else { + return; + }; + check_expr(&rhs, analysis, config, env, errors); + if !config.is_enabled(TypeCheckRule::UnaryOps) { + return; + } + let Some(op) = unary.unary_operator() else { + return; + }; + let rhs_ty = analysis + .type_for_range(rhs.syntax().text_range()) + .unwrap_or(Ty::ANY); + if rhs_ty.is_any() || rhs_ty.is_never() { + return; + } + if let Err(reason) = + analysis.with_store_mut(|store| unary_op_result_ty(op.kind(), rhs_ty, store)) + { + errors.push(TypeError { + kind: TypeErrorKind::UnaryOpMismatch { + operand: rhs_ty, + op: unary_op_str(op.kind()), + reason, + }, + range: base.syntax().text_range(), + }); + } +} + +fn check_if_then_else_base( + if_expr: &ExprIfThenElse, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(cond) = if_expr.cond() { + check_expr(&cond, analysis, config, env, errors); + } + if let Some(then_expr) = if_expr.then().and_then(|then_clause| then_clause.expr()) { + check_expr(&then_expr, analysis, config, env, errors); + } + if let Some(else_expr) = if_expr.else_().and_then(|else_clause| else_clause.expr()) { + check_expr(&else_expr, analysis, config, env, errors); + } +} + +fn check_parened_base( + parens: 
&ExprParened, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(inner) = parens.expr() { + check_expr(&inner, analysis, config, env, errors); + } +} + +fn check_function_base( + func: &ExprFunction, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + let Some(body) = func.expr() else { + return; + }; + env.push_scope(); + check_expr(&body, analysis, config, env, errors); + env.pop_scope(); +} + +fn check_array_base( + arr: &ExprArray, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + for elem in arr.exprs() { + check_expr(&elem, analysis, config, env, errors); + } +} + +fn check_object_base( + obj: &ExprObject, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(body) = obj.obj_body() { + check_obj_body(&body, analysis, config, env, errors); + } +} + +fn check_obj_extend_base( + extend: &ExprObjExtend, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(inner) = extend.expr() { + check_expr(&inner, analysis, config, env, errors); + } +} + +fn check_array_comp_base( + comp: &ExprArrayComp, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(expr) = comp.expr() { + check_expr(&expr, analysis, config, env, errors); + } +} + +fn check_field_base( + base: &ExprBase, + field: &ExprField, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(base_expr) = field.base() { + check_expr(&base_expr, analysis, config, env, errors); + } + if !config.is_enabled(TypeCheckRule::FieldAccess) { + return; + } + let Some(base_expr) = field.base() else { + return; + }; + let base_ty = analysis + .type_for_range(base_expr.syntax().text_range()) + .unwrap_or(Ty::ANY); + if 
base_ty.is_any() || base_ty.is_never() { + return; + } + if !analysis.supports_field_access(base_ty) { + errors.push(TypeError { + kind: TypeErrorKind::FieldAccessOnNonObject { actual: base_ty }, + range: base.syntax().text_range(), + }); + return; + } + if !analysis.is_closed_object(base_ty) { + return; + } + let Some(field_name) = field + .field() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + else { + return; + }; + if analysis.object_has_field(base_ty, &field_name) != Some(false) { + return; + } + let mut available = analysis.object_field_names(base_ty).unwrap_or_default(); + available.sort(); + let suggestion = find_best_match( + &field_name, + available.iter().map(std::string::String::as_str), + ) + .map(std::string::ToString::to_string); + errors.push(TypeError { + kind: TypeErrorKind::NoSuchField { + field: field_name, + available, + suggestion, + }, + range: base.syntax().text_range(), + }); +} + +fn check_index_base( + base: &ExprBase, + idx: &ExprIndex, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(base_expr) = idx.base() { + check_expr(&base_expr, analysis, config, env, errors); + } + if let Some(index_expr) = idx.index() { + check_expr(&index_expr, analysis, config, env, errors); + } + if !config.is_enabled(TypeCheckRule::IndexAccess) { + return; + } + let Some(base_expr) = idx.base() else { + return; + }; + let base_ty = analysis + .type_for_range(base_expr.syntax().text_range()) + .unwrap_or(Ty::ANY); + if base_ty.is_any() || base_ty.is_never() { + return; + } + if !analysis.is_indexable(base_ty) { + errors.push(TypeError { + kind: TypeErrorKind::IndexOnNonIndexable { actual: base_ty }, + range: base.syntax().text_range(), + }); + return; + } + let Some(tuple_len) = analysis.tuple_len(base_ty) else { + return; + }; + let Some(index) = get_constant_index(idx.index().as_ref()) else { + return; + }; + if index >= tuple_len { + errors.push(TypeError { + kind: 
TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index }, + range: base.syntax().text_range(), + }); + } +} + +fn check_slice_base( + base: &ExprBase, + slice: &ExprSlice, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(base_expr) = slice.base() { + check_expr(&base_expr, analysis, config, env, errors); + } + if !config.is_enabled(TypeCheckRule::IndexAccess) { + return; + } + let Some(base_expr) = slice.base() else { + return; + }; + let base_ty = analysis + .type_for_range(base_expr.syntax().text_range()) + .unwrap_or(Ty::ANY); + if !base_ty.is_any() && !base_ty.is_never() && !analysis.is_sliceable(base_ty) { + errors.push(TypeError { + kind: TypeErrorKind::IndexOnNonIndexable { actual: base_ty }, + range: base.syntax().text_range(), + }); + } +} + +fn check_call_base( + base: &ExprBase, + call: &ExprCall, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(callee_expr) = call.callee() { + check_expr(&callee_expr, analysis, config, env, errors); + } + if let Some(args_desc) = call.args_desc() { + for arg in args_desc.args() { + if let Some(arg_expr) = arg.expr() { + check_expr(&arg_expr, analysis, config, env, errors); + } + } + } + if !config.is_enabled(TypeCheckRule::CallChecks) { + return; + } + if let Some(callee_expr) = call.callee() { + let callee_ty = analysis + .type_for_range(callee_expr.syntax().text_range()) + .unwrap_or(Ty::ANY); + if !callee_ty.is_any() && !callee_ty.is_never() && !analysis.is_callable(callee_ty) { + errors.push(TypeError { + kind: TypeErrorKind::CallOnNonFunction { actual: callee_ty }, + range: base.syntax().text_range(), + }); + } + } + check_stdlib_call_expr(call, analysis, errors); + check_user_function_call_expr(call, analysis, errors); +} + +/// Check an object body for type errors. 
+fn check_obj_body( + body: &ObjBody, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let ObjBody::ObjBodyMemberList(members) = body { + for member in members.members() { + match member { + Member::MemberFieldNormal(field) => { + if let Some(expr) = field.expr() { + check_expr(&expr, analysis, config, env, errors); + } + } + Member::MemberFieldMethod(method) => { + if let Some(expr) = method.expr() { + env.push_scope(); + check_expr(&expr, analysis, config, env, errors); + env.pop_scope(); + } + } + Member::MemberAssertStmt(assert_member) => { + if let Some(assertion) = assert_member.assertion() { + if let Some(cond) = assertion.condition() { + check_expr(&cond, analysis, config, env, errors); + } + } + } + Member::MemberBindStmt(_) => {} + } + } + } +} + +/// Get the string representation of a binary operator. +fn binary_op_str(op: BinaryOperatorKind) -> &'static str { + match op { + BinaryOperatorKind::Plus => "+", + BinaryOperatorKind::Minus => "-", + BinaryOperatorKind::Mul => "*", + BinaryOperatorKind::Div => "/", + BinaryOperatorKind::Modulo => "%", + BinaryOperatorKind::And => "&&", + BinaryOperatorKind::Or => "||", + BinaryOperatorKind::BitAnd => "&", + BinaryOperatorKind::BitOr => "|", + BinaryOperatorKind::BitXor => "^", + BinaryOperatorKind::Lhs => "<<", + BinaryOperatorKind::Rhs => ">>", + BinaryOperatorKind::Eq => "==", + BinaryOperatorKind::Ne => "!=", + BinaryOperatorKind::Lt => "<", + BinaryOperatorKind::Gt => ">", + BinaryOperatorKind::Le => "<=", + BinaryOperatorKind::Ge => ">=", + BinaryOperatorKind::InKw => "in", + BinaryOperatorKind::NullCoaelse => "??", + BinaryOperatorKind::MetaObjectApply => "+:", + BinaryOperatorKind::ErrorNoOperator => "", + } +} + +/// Get the string representation of a unary operator. 
+fn unary_op_str(op: UnaryOperatorKind) -> &'static str { + match op { + UnaryOperatorKind::Not => "!", + UnaryOperatorKind::Minus => "-", + UnaryOperatorKind::BitNot => "~", + } +} + +fn non_negative_integral_usize(value: f64) -> Option { + if !(value.is_finite() && value >= 0.0 && value.fract() == 0.0) { + return None; + } + format!("{value:.0}").parse().ok() +} + +/// Extract a constant index value from an expression. +/// +/// Returns `Some(index)` if the expression is a non-negative integer literal. +fn get_constant_index(expr: Option<&Expr>) -> Option { + let expr = expr?; + let ExprBase::ExprNumber(num) = expr.expr_base()? else { + return None; + }; + let text = num.syntax().text().to_string(); + let value: f64 = text.parse().ok()?; + + non_negative_integral_usize(value) +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + fn check_code(code: &str) -> Vec { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze(&doc); + let config = TypeCheckConfig::all(); + check_types(&doc, &analysis, &config) + } + + #[test] + fn test_string_plus_object_error() { + let errors = check_code(r#""str" + {}"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::BinaryOpMismatch { .. }, + .. + }] + ), + "expected single BinaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_number_field_access_error() { + // Use parentheses to ensure parser treats this as field access on number + let errors = check_code("(42).foo"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FieldAccessOnNonObject { .. }, + .. + }] + ), + "expected single FieldAccessOnNonObject, got: {errors:?}" + ); + } + + #[test] + fn test_number_index_error() { + let errors = check_code("42[0]"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::IndexOnNonIndexable { .. }, + .. 
+ }] + ), + "expected single IndexOnNonIndexable, got: {errors:?}" + ); + } + + #[test] + fn test_string_call_error() { + let errors = check_code(r#""hello"()"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::CallOnNonFunction { .. }, + .. + }] + ), + "expected single CallOnNonFunction, got: {errors:?}" + ); + } + + #[test] + fn test_unary_not_on_object_error() { + // Use parentheses to ensure parser parses correctly + let errors = check_code("!({})"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::UnaryOpMismatch { .. }, + .. + }] + ), + "expected single UnaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_unary_minus_on_string_error() { + // Use parentheses to ensure parser parses correctly + let errors = check_code(r#"-("hello")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::UnaryOpMismatch { .. }, + .. + }] + ), + "expected single UnaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_valid_number_addition() { + let errors = check_code("1 + 2"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_string_concatenation() { + let errors = check_code(r#""hello" + "world""#); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_array_concatenation() { + let errors = check_code("[1, 2] + [3, 4]"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_object_merge() { + let errors = check_code("{a: 1} + {b: 2}"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_field_access() { + let errors = check_code("{a: 1}.a"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_array_index() { + let errors = check_code("[1, 2, 3][0]"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + 
+ #[test] + fn test_valid_function_call() { + let errors = check_code("(function(x) x)(1)"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_any_type_no_error() { + // Calling an unknown stdlib function returns Any, so no type error + let errors = check_code("std.foo() + 1"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_bitwise_on_strings_error() { + let errors = check_code(r#""a" | "b""#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::BinaryOpMismatch { .. }, + .. + }] + ), + "expected single BinaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_in_operator_valid() { + let errors = check_code(r#""foo" in {foo: 1}"#); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_in_operator_invalid_lhs() { + let errors = check_code("42 in {foo: 1}"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::BinaryOpMismatch { .. }, + .. + }] + ), + "expected single BinaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_comparison_always_valid() { + // Comparison operators work on any types + let errors = check_code(r#""a" == 1"#); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_null_coalesce_always_valid() { + let errors = check_code("null ?? 1"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_stdlib_too_few_args() { + // std.map requires 2 arguments + let errors = check_code("std.map(function(x) x)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooFewArguments { + function_name, + required: 2, + provided: 1, + }, + .. 
+ }] if function_name == "std.map" + ), + "expected TooFewArguments for std.map, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_too_many_args() { + // std.length takes 1 argument + let errors = check_code("std.length([1, 2], 3)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooManyArguments { + function_name, + max_allowed: 1, + provided: 2, + }, + .. + }] if function_name == "std.length" + ), + "expected TooManyArguments for std.length, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_correct_arg_count() { + // std.map with correct 2 arguments + let errors = check_code("std.map(function(x) x, [1, 2, 3])"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_stdlib_alias_uses_builtin_signature() { + let errors = check_code("local s = std; s.length([1, 2], 3)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooManyArguments { + function_name, + max_allowed: 1, + provided: 2, + }, + .. 
+ }] if function_name == "std.length" + ), + "expected TooManyArguments for alias call to std.length, got: {errors:?}" + ); + } + + #[test] + fn test_shadowed_std_does_not_use_builtin_signature() { + let errors = check_code("local std = { length(x, y): x }; std.length(1, 2)"); + assert_eq!( + errors.as_slice(), + &[], + "expected no stdlib arg-count checks for shadowed std, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_optional_args() { + // std.sort has 1 required and 1 optional argument + let errors = check_code("std.sort([3, 1, 2])"); + assert!( + errors.is_empty(), + "expected no errors for sort with 1 arg, got: {errors:?}" + ); + + // With optional argument + let errors = check_code("std.sort([3, 1, 2], function(x) x)"); + assert!( + errors.is_empty(), + "expected no errors for sort with 2 args, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_variadic() { + // std.format is variadic + let errors = check_code(r#"std.format("%s %d %s", "a", 1, "b")"#); + assert!( + errors.is_empty(), + "expected no errors for variadic format, got: {errors:?}" + ); + } + + #[test] + fn test_user_function_too_few_args() { + // User function with 2 required parameters called with 1 arg + let errors = check_code("local add(a, b) = a + b; add(1)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooFewArguments { + function_name, + required: 2, + provided: 1, + }, + .. + }] if function_name == "add" + ), + "expected TooFewArguments for add, got: {errors:?}" + ); + } + + #[test] + fn test_user_function_too_many_args() { + // User function with 1 parameter called with 2 args + let errors = check_code("local double(x) = x * 2; double(1, 2)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooManyArguments { + function_name, + max_allowed: 1, + provided: 2, + }, + .. 
+ }] if function_name == "double" + ), + "expected TooManyArguments for double, got: {errors:?}" + ); + } + + #[test] + fn test_user_function_correct_arg_count() { + // User function with correct argument count + let errors = check_code("local add(a, b) = a + b; add(1, 2)"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_user_function_unknown_named_arg() { + let errors = check_code("local add(x, y) = x + y; add(z = 1)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::UnknownNamedArgument { + function_name, + arg_name, + expected, + }, + .. + }] if function_name == "add" + && arg_name == "z" + && expected == &["x", "y"] + ), + "expected UnknownNamedArgument for add(z = 1), got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_unknown_named_arg() { + let errors = check_code("std.length(y = [1, 2, 3])"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::UnknownNamedArgument { + function_name, + arg_name, + expected, + }, + .. 
+ }] if function_name == "std.length" + && arg_name == "y" + && expected == &["x"] + ), + "expected UnknownNamedArgument for std.length(y = ...), got: {errors:?}" + ); + } + + #[test] + fn test_user_function_with_defaults() { + // User function with default parameter + let errors = check_code( + "local greet(name, greeting='Hello') = greeting + ' ' + name; greet('world')", + ); + assert!( + errors.is_empty(), + "expected no errors for function with default param, got: {errors:?}" + ); + + // With both args + let errors = check_code( + "local greet(name, greeting='Hello') = greeting + ' ' + name; greet('world', 'Hi')", + ); + assert!( + errors.is_empty(), + "expected no errors for function with both args, got: {errors:?}" + ); + } + + #[test] + fn test_user_function_all_defaults() { + // Function with only default parameters - can be called with 0 args + let errors = check_code("local f(a=1, b=2) = a + b; f()"); + assert!( + errors.is_empty(), + "expected no errors for function with all defaults, got: {errors:?}" + ); + } + + #[test] + fn test_no_such_field_error() { + // Access non-existent field on object with known structure + let errors = check_code("{a: 1, b: 2}.c"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, available, .. }, + .. 
+ }] if field == "c" && available == &["a", "b"] + ), + "expected NoSuchField error for field 'c', got: {errors:?}" + ); + } + + #[test] + fn test_valid_field_access_known_object() { + // Access existing field on object with known structure + let errors = check_code("{a: 1, b: 2}.a"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_field_access_on_object_with_unknown_fields() { + // Object comprehension has unknown fields - no error + let errors = check_code("{ [x]: x for x in ['a', 'b'] }.c"); + assert!( + errors.is_empty(), + "expected no errors for object comprehension, got: {errors:?}" + ); + } + + #[test] + fn test_field_access_on_merged_objects() { + // Merged objects with known fields - we track the combined fields + let errors = check_code("({a: 1} + {b: 2}).c"); + // Field access on merged object with known fields should error + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, available, .. }, + .. + }] if field == "c" && available == &["a", "b"] + ), + "expected NoSuchField error for field 'c', got: {errors:?}" + ); + } + + #[test] + fn test_valid_field_access_on_merged_objects() { + // Merged objects - accessing known fields should succeed + let errors = check_code("({a: 1} + {b: 2}).a"); + assert!( + errors.is_empty(), + "expected no errors for valid field access on merged object, got: {errors:?}" + ); + + let errors = check_code("({a: 1} + {b: 2}).b"); + assert!( + errors.is_empty(), + "expected no errors for valid field access on merged object, got: {errors:?}" + ); + } + + #[test] + fn test_merged_object_field_override() { + // When merging, right operand fields override left + // Both have field 'a', result should have 'a' and 'b' + let errors = check_code("({a: 1} + {a: 'str', b: 2}).c"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, available, .. }, + .. 
+ }] if field == "c" && available == &["a", "b"] + ), + "expected NoSuchField error for field 'c', got: {errors:?}" + ); + } + + #[test] + fn test_no_such_field_on_local_object() { + // Access non-existent field on local object binding + let errors = check_code("local obj = {x: 1}; obj.y"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, available, .. }, + .. + }] if field == "y" && available == &["x"] + ), + "expected NoSuchField error for field 'y', got: {errors:?}" + ); + } + + #[test] + fn test_tuple_index_in_bounds() { + // Valid tuple index access + assert_eq!(check_code("[1, 2, 3][0]").as_slice(), &[]); + assert_eq!(check_code("[1, 2, 3][2]").as_slice(), &[]); + } + + #[test] + fn test_tuple_index_out_of_bounds() { + // Index out of bounds on tuple + let errors = check_code("[1, 2, 3][5]"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TupleIndexOutOfBounds { + tuple_len: 3, + index: 5 + }, + .. 
+ }] + ), + "expected TupleIndexOutOfBounds error, got: {errors:?}" + ); + } + + #[test] + fn test_tuple_negative_index_ignored() { + // Negative indices are not statically checked + assert_eq!(check_code("[1, 2, 3][-1]").as_slice(), &[]); + } + + #[test] + fn test_tuple_non_constant_index_ignored() { + // Non-constant indices can't be checked statically + assert_eq!(check_code("local i = 5; [1, 2, 3][i]").as_slice(), &[]); + } + + // Format string validation tests + + #[test] + fn test_format_valid_string() { + // Valid format string with correct arguments + let errors = check_code(r#"std.format("Hello %s!", "world")"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors, got: {errors:?}" + ); + } + + #[test] + fn test_format_valid_multiple_args() { + // Valid format with multiple arguments + let errors = check_code(r#"std.format("%s has %d apples", "Alice", 5)"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors, got: {errors:?}" + ); + } + + #[test] + fn test_format_invalid_specifier() { + // Unknown format specifier + let errors = check_code(r#"std.format("%z", 1)"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatStringError { + parse_error: crate::format_check::FormatParseError::UnknownSpecifier('z') + }, + .. + }] + ), + "expected FormatStringError for unknown specifier, got: {errors:?}" + ); + } + + #[test] + fn test_format_too_few_args() { + // Not enough arguments for placeholders + let errors = check_code(r#"std.format("%s %s", "one")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: 2, + provided: 1 + }, + .. 
+ }] + ), + "expected FormatArgCount error, got: {errors:?}" + ); + } + + #[test] + fn test_format_too_many_args() { + // Too many arguments for placeholders + let errors = check_code(r#"std.format("%s", "one", "two", "three")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: 1, + provided: 3 + }, + .. + }] + ), + "expected FormatArgCount error, got: {errors:?}" + ); + } + + #[test] + fn test_format_type_mismatch_number() { + // %d expects number, got string + let errors = check_code(r#"std.format("%d", "not a number")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatArgTypeMismatch { + index: 0, + specifier: 'd', + .. + }, + .. + }] + ), + "expected FormatArgTypeMismatch for %d with string, got: {errors:?}" + ); + } + + #[test] + fn test_format_string_accepts_any() { + // %s accepts any type - number is fine + let errors = check_code(r#"std.format("%s", 42)"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for %s with number, got: {errors:?}" + ); + } + + #[test] + fn test_format_escaped_percent() { + // %% doesn't count as a placeholder + let errors = check_code(r#"std.format("100%% complete")"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for escaped percent, got: {errors:?}" + ); + } + + #[test] + fn test_format_incomplete_specifier() { + // Incomplete format specifier at end + let errors = check_code(r#"std.format("Hello %")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatStringError { + parse_error: crate::format_check::FormatParseError::IncompleteSpecifier + }, + .. 
+ }] + ), + "expected FormatStringError for incomplete specifier, got: {errors:?}" + ); + } + + #[test] + fn test_format_with_width_precision() { + // Format with width and precision modifiers + let errors = check_code(r#"std.format("%10.2f", 3.14159)"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for format with width/precision, got: {errors:?}" + ); + } + + #[test] + fn test_format_non_literal_string() { + // Can't validate non-literal format strings + let errors = check_code(r#"local fmt = "%s"; std.format(fmt, "hello")"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for non-literal format string, got: {errors:?}" + ); + } + + #[test] + fn test_format_text_block_literal_valid() { + let errors = check_code( + r#"std.format(||| + %s has %d apples +|||, "Alice", 5)"#, + ); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for text-block format string, got: {errors:?}" + ); + } + + #[test] + fn test_format_text_block_literal_arg_count_error() { + let errors = check_code( + r#"std.format(||| + %s %d +|||, "one")"#, + ); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: 2, + provided: 1 + }, + .. + }] + ), + "expected FormatArgCount error for text-block format string, got: {errors:?}" + ); + } + + #[test] + fn test_no_such_field_with_suggestion() { + // Typo should trigger "did you mean" suggestion + let errors = check_code("{length: 1, width: 2}.lenght"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, suggestion: Some(suggestion), .. }, + .. + }] if field == "lenght" && suggestion == "length" + ); + } + + #[test] + fn test_no_such_field_no_suggestion_for_unrelated() { + // Completely different field name should not have suggestion + let errors = check_code("{a: 1, b: 2}.xyz"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, suggestion: None, .. }, + .. 
+ }] if field == "xyz" + ); + } + + // Argument type validation tests + + #[test] + fn test_stdlib_arg_type_mismatch() { + // std.length expects an array, string, or object, not a number + let errors = check_code("std.length(42)"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { function_name, param_name, .. }, + .. + }] if function_name == "std.length" && param_name == "x" + ); + } + + #[test] + fn test_stdlib_arg_type_valid() { + // std.length with valid array argument should produce no errors + let errors = check_code("std.length([1, 2, 3])"); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_higher_order_accepts_any_function() { + // std.map should accept any function, not just function() + let errors = check_code("std.map(function(x) x + 1, [1, 2, 3])"); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for std.map with function(x), got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_higher_order_rejects_non_function() { + // std.map's first arg must be a function, not a number + let errors = check_code("std.map(42, [1, 2, 3])"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { function_name, param_name, .. }, + .. 
+ }] if function_name == "std.map" && param_name == "func" + ); + } + + // Higher-order callback type validation tests + + #[test] + fn test_callback_type_valid_map_with_any_param() { + // Callback with untyped param (Any) should accept anything + // User-defined functions don't have type annotations in Jsonnet + let errors = check_code("std.map(function(x) x + 1, [1, 2, 3])"); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for std.map with untyped callback, got: {errors:?}" + ); + } + + #[test] + fn test_callback_type_valid_map_std_length_with_arrays() { + // std.length accepts arrays, so passing array elements to it is valid + let errors = check_code("std.map(std.length, [[1, 2], [3, 4, 5]])"); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for std.map(std.length, [[...], ...]), got: {errors:?}" + ); + } + + #[test] + fn test_callback_type_valid_map_std_length_with_strings() { + // std.length accepts strings + let errors = check_code(r#"std.map(std.length, ["hello", "world"])"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for std.map(std.length, [...strings...]), got: {errors:?}" + ); + } + + #[test] + fn test_callback_type_mismatch_map_std_length_with_numbers() { + // std.length does NOT accept numbers, so passing number array should error + let errors = check_code("std.map(std.length, [1, 2, 3])"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::CallbackTypeMismatch { function_name, callback_param, .. }, + .. + }] if function_name == "std.map" && callback_param == "func" + ); + } + + #[test] + fn test_callback_type_mismatch_map_std_length_with_booleans() { + // std.length does NOT accept booleans + let errors = check_code("std.map(std.length, [true, false])"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::CallbackTypeMismatch { function_name, callback_param, .. }, + .. 
+ }] if function_name == "std.map" && callback_param == "func" + ); + } + + #[test] + fn test_apply_substitution() { + use assert_matches::assert_matches; + use jrsonnet_lsp_types::{GlobalTyStore, LocalTyStore, Ty, TyData, TySubst}; + use rowan::TextRange; + + // Create a local type and a substitution mapping it to a global type + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + let local_arr = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + assert!(local_arr.is_local()); + + let subst = TySubst::merge(&global, &local); + let global_arr = subst.apply(local_arr); + assert!(global_arr.is_global()); + + // Create an error with the local type + let error = TypeError { + kind: TypeErrorKind::FieldAccessOnNonObject { actual: local_arr }, + range: TextRange::new(0.into(), 10.into()), + }; + + // Apply substitution + let substituted = error.apply_substitution(&subst); + + // Verify the type was substituted and range is preserved + assert_matches!( + substituted, + TypeError { + kind: TypeErrorKind::FieldAccessOnNonObject { actual }, + range, + } if actual == global_arr && actual.is_global() && range == error.range + ); + } + + #[test] + fn test_apply_substitution_argument_mismatch() { + use assert_matches::assert_matches; + use jrsonnet_lsp_types::{GlobalTyStore, LocalTyStore, Ty, TyData, TySubst}; + use rowan::TextRange; + + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create two local types + let expected_local = local.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + let actual_local = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + let subst = TySubst::merge(&global, &local); + let expected_global = subst.apply(expected_local); + let actual_global = subst.apply(actual_local); + + // Create an error with both local types + let error = TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { + function_name: "myFunc".to_string(), + 
param_name: "arr".to_string(), + param_index: 0, + expected: expected_local, + actual: actual_local, + }, + range: TextRange::new(5.into(), 15.into()), + }; + + // Apply substitution + let substituted = error.apply_substitution(&subst); + + // Verify both types were substituted + assert_matches!( + substituted, + TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { + function_name, + param_name, + param_index: 0, + expected, + actual, + }, + .. + } if function_name == "myFunc" + && param_name == "arr" + && expected == expected_global + && actual == actual_global + && expected.is_global() + && actual.is_global() + ); + } + + #[test] + fn test_apply_substitution_no_ty_fields() { + use assert_matches::assert_matches; + use jrsonnet_lsp_types::TySubst; + use rowan::TextRange; + + // Error kinds without Ty fields should be unchanged + let subst = TySubst::new(); + + let error = TypeError { + kind: TypeErrorKind::NoSuchField { + field: "foo".to_string(), + available: vec!["bar".to_string(), "baz".to_string()], + suggestion: Some("bar".to_string()), + }, + range: TextRange::new(0.into(), 5.into()), + }; + + let substituted = error.apply_substitution(&subst); + + assert_matches!( + substituted, + TypeError { + kind: TypeErrorKind::NoSuchField { + field, + available, + suggestion: Some(suggested), + }, + .. + } if field == "foo" + && available == vec!["bar".to_string(), "baz".to_string()] + && suggested == "bar" + ); + } +} diff --git a/crates/jrsonnet-lsp-check/src/type_check/mod.rs b/crates/jrsonnet-lsp-check/src/type_check/mod.rs new file mode 100644 index 00000000..c1bb3e02 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/type_check/mod.rs @@ -0,0 +1,13 @@ +//! Static type checking for Jsonnet expressions. +//! +//! Provides type error diagnostics that detect type mismatches before +//! evaluation: operator mismatches, invalid field/index access, and +//! function call validation. 
+ +mod calls; +mod core; +mod types; + +pub use core::check_types; + +pub use types::{TypeCheckConfig, TypeCheckRule, TypeError, TypeErrorKind}; diff --git a/crates/jrsonnet-lsp-check/src/type_check/types.rs b/crates/jrsonnet-lsp-check/src/type_check/types.rs new file mode 100644 index 00000000..90eeb16d --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/type_check/types.rs @@ -0,0 +1,486 @@ +use jrsonnet_lsp_document::{to_lsp_range, LineIndex}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_types::{BinaryOpTypeError, Ty, UnaryOpTypeError}; +use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString}; +use rowan::TextRange; + +use crate::format_check::FormatParseError; + +/// A type error detected during static analysis. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TypeError { + /// The kind of type error. + pub kind: TypeErrorKind, + /// The source location of the error. + pub range: TextRange, +} + +/// The kind of type error. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum TypeErrorKind { + /// Binary operator applied to incompatible types. + BinaryOpMismatch { + lhs: Ty, + rhs: Ty, + op: &'static str, + reason: BinaryOpTypeError, + }, + /// Unary operator applied to incompatible type. + UnaryOpMismatch { + operand: Ty, + op: &'static str, + reason: UnaryOpTypeError, + }, + /// Field access (`.field`) on a non-object type. + FieldAccessOnNonObject { actual: Ty }, + /// Index access (`[i]`) on a non-indexable type. + IndexOnNonIndexable { actual: Ty }, + /// Function call on a non-callable type. + CallOnNonFunction { actual: Ty }, + /// Wrong number of arguments to function. + WrongArgCount { expected: usize, actual: usize }, + /// Too few arguments to function. + TooFewArguments { + function_name: String, + required: usize, + provided: usize, + }, + /// Too many arguments to function. + TooManyArguments { + function_name: String, + max_allowed: usize, + provided: usize, + }, + /// Named argument does not match any declared parameter. 
+ UnknownNamedArgument { + function_name: String, + arg_name: String, + expected: Vec, + }, + /// Access to non-existent field on object with known structure. + NoSuchField { + field: String, + available: Vec, + suggestion: Option, + }, + /// Index out of bounds on a tuple with known length. + TupleIndexOutOfBounds { tuple_len: usize, index: usize }, + /// Format string parse error. + FormatStringError { parse_error: FormatParseError }, + /// Wrong number of format arguments. + FormatArgCount { expected: usize, provided: usize }, + /// Format argument type mismatch. + FormatArgTypeMismatch { + index: usize, + expected: Ty, + actual: Ty, + specifier: char, + }, + /// Function argument type mismatch. + ArgumentTypeMismatch { + function_name: String, + param_name: String, + param_index: usize, + expected: Ty, + actual: Ty, + }, + /// Callback function parameter type mismatch with collection element type. + CallbackTypeMismatch { + function_name: String, + callback_param: String, + element_type: Ty, + callback_param_type: Ty, + }, +} + +impl TypeErrorKind { + /// Apply a type substitution to all `Ty` references in this error kind. + /// + /// This is used when merging local types into the global store - the substitution + /// maps local `Ty` values to their global equivalents. 
+ #[must_use] + pub fn apply_substitution(&self, subst: &jrsonnet_lsp_types::TySubst) -> Self { + match self { + TypeErrorKind::BinaryOpMismatch { + lhs, + rhs, + op, + reason, + } => TypeErrorKind::BinaryOpMismatch { + lhs: subst.apply(*lhs), + rhs: subst.apply(*rhs), + op, + reason: *reason, + }, + TypeErrorKind::UnaryOpMismatch { + operand, + op, + reason, + } => TypeErrorKind::UnaryOpMismatch { + operand: subst.apply(*operand), + op, + reason: *reason, + }, + TypeErrorKind::FieldAccessOnNonObject { actual } => { + TypeErrorKind::FieldAccessOnNonObject { + actual: subst.apply(*actual), + } + } + TypeErrorKind::IndexOnNonIndexable { actual } => TypeErrorKind::IndexOnNonIndexable { + actual: subst.apply(*actual), + }, + TypeErrorKind::CallOnNonFunction { actual } => TypeErrorKind::CallOnNonFunction { + actual: subst.apply(*actual), + }, + // These variants have no Ty references + TypeErrorKind::WrongArgCount { .. } + | TypeErrorKind::TooFewArguments { .. } + | TypeErrorKind::TooManyArguments { .. } + | TypeErrorKind::UnknownNamedArgument { .. } + | TypeErrorKind::NoSuchField { .. } + | TypeErrorKind::TupleIndexOutOfBounds { .. } + | TypeErrorKind::FormatStringError { .. } + | TypeErrorKind::FormatArgCount { .. 
} => self.clone(), + TypeErrorKind::FormatArgTypeMismatch { + index, + expected, + actual, + specifier, + } => TypeErrorKind::FormatArgTypeMismatch { + index: *index, + expected: subst.apply(*expected), + actual: subst.apply(*actual), + specifier: *specifier, + }, + TypeErrorKind::ArgumentTypeMismatch { + function_name, + param_name, + param_index, + expected, + actual, + } => TypeErrorKind::ArgumentTypeMismatch { + function_name: function_name.clone(), + param_name: param_name.clone(), + param_index: *param_index, + expected: subst.apply(*expected), + actual: subst.apply(*actual), + }, + TypeErrorKind::CallbackTypeMismatch { + function_name, + callback_param, + element_type, + callback_param_type, + } => TypeErrorKind::CallbackTypeMismatch { + function_name: function_name.clone(), + callback_param: callback_param.clone(), + element_type: subst.apply(*element_type), + callback_param_type: subst.apply(*callback_param_type), + }, + } + } +} + +impl TypeError { + /// Apply a type substitution to all `Ty` references in this error. + /// + /// This is used when merging local types into the global store - the substitution + /// maps local `Ty` values to their global equivalents. + #[must_use] + pub fn apply_substitution(&self, subst: &jrsonnet_lsp_types::TySubst) -> Self { + Self { + kind: self.kind.apply_substitution(subst), + range: self.range, + } + } + + /// Convert the type error to an LSP diagnostic. 
+ pub fn to_diagnostic( + &self, + line_index: &LineIndex, + text: &str, + analysis: &TypeAnalysis, + ) -> Diagnostic { + let message = render_type_error(&self.kind, analysis); + + Diagnostic { + range: to_lsp_range(self.range, line_index, text), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("type-error".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: None, + tags: None, + data: None, + } + } +} + +fn render_type_error(kind: &TypeErrorKind, analysis: &TypeAnalysis) -> String { + match kind { + TypeErrorKind::BinaryOpMismatch { + lhs, + rhs, + op, + reason, + } => render_binary_op_mismatch(*lhs, *rhs, op, *reason, analysis), + TypeErrorKind::UnaryOpMismatch { + operand, + op, + reason, + } => render_unary_op_mismatch(*operand, op, *reason, analysis), + TypeErrorKind::FieldAccessOnNonObject { actual } => { + format!( + "field access on non-object type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::IndexOnNonIndexable { actual } => { + format!( + "index access on non-indexable type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::CallOnNonFunction { actual } => { + format!( + "cannot call non-function type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::WrongArgCount { expected, actual } => { + format!("function expects {expected} argument(s), but {actual} provided") + } + TypeErrorKind::TooFewArguments { + function_name, + required, + provided, + } => { + format!( + "`{function_name}` requires at least {required} argument(s), but {provided} provided" + ) + } + TypeErrorKind::TooManyArguments { + function_name, + max_allowed, + provided, + } => { + format!( + "`{function_name}` accepts at most {max_allowed} argument(s), but {provided} provided" + ) + } + TypeErrorKind::UnknownNamedArgument { + function_name, + arg_name, + expected, + } => { + let mut message = format!("`{function_name}` has no parameter named `{arg_name}`"); 
+            if !expected.is_empty() {
+                message.push_str("; expected one of: ");
+                message.push_str(
+                    &expected
+                        .iter()
+                        .map(|name| format!("`{name}`"))
+                        .collect::<Vec<_>>()
+                        .join(", "),
+                );
+            }
+            message
+        }
+        TypeErrorKind::NoSuchField {
+            field,
+            available,
+            suggestion,
+        } => {
+            let mut msg = format!("no such field `{field}`");
+            if let Some(suggested) = suggestion {
+                msg.push_str("; did you mean `");
+                msg.push_str(suggested);
+                msg.push_str("`?");
+            } else if !available.is_empty() {
+                let available_str = available.join(", ");
+                msg.push_str("; available fields: ");
+                msg.push_str(&available_str);
+            }
+            msg
+        }
+        TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index } => {
+            format!("index {index} is out of bounds for tuple of length {tuple_len}")
+        }
+        TypeErrorKind::FormatStringError { parse_error } => match parse_error {
+            FormatParseError::IncompleteSpecifier => {
+                "invalid format string: incomplete format specifier".to_string()
+            }
+            FormatParseError::UnknownSpecifier(specifier) => {
+                format!("invalid format string: unknown specifier '%{specifier}'")
+            }
+            FormatParseError::UnclosedNamedPlaceholder => {
+                "invalid format string: unclosed named placeholder".to_string()
+            }
+            FormatParseError::EmptyName => {
+                "invalid format string: empty name in named placeholder".to_string()
+            }
+            FormatParseError::MixedPositionalAndNamed => {
+                "invalid format string: cannot mix positional and named placeholders".to_string()
+            }
+        },
+        TypeErrorKind::FormatArgCount { expected, provided } => {
+            format!("format string expects {expected} argument(s), but {provided} provided")
+        }
+        TypeErrorKind::FormatArgTypeMismatch {
+            index,
+            expected,
+            actual,
+            specifier,
+        } => {
+            format!(
+                "format argument {} (specifier %{}) expects `{}`, got `{}`",
+                index + 1,
+                specifier,
+                analysis.display(*expected),
+                analysis.display(*actual)
+            )
+        }
+        TypeErrorKind::ArgumentTypeMismatch {
+            function_name,
+            param_name,
+            param_index,
+            expected,
+            actual,
+        } => {
+            format!(
+                "`{}` argument {} (`{}`) expects `{}`, got `{}`",
+                function_name,
+                param_index + 1,
+                param_name,
+                analysis.display(*expected),
+                analysis.display(*actual)
+            )
+        }
+        TypeErrorKind::CallbackTypeMismatch {
+            function_name,
+            callback_param,
+            element_type,
+            callback_param_type,
+        } => {
+            format!(
+                "`{}` callback parameter `{}` has type `{}`, but array elements have type `{}`",
+                function_name,
+                callback_param,
+                analysis.display(*callback_param_type),
+                analysis.display(*element_type)
+            )
+        }
+    }
+}
+
+fn render_binary_op_mismatch(
+    lhs: Ty,
+    rhs: Ty,
+    op: &str,
+    reason: BinaryOpTypeError,
+    analysis: &TypeAnalysis,
+) -> String {
+    match reason {
+        BinaryOpTypeError::RequiresNumberPair => format!(
+            "operator `{op}` requires (number, number), got ({}, {})",
+            analysis.display(lhs),
+            analysis.display(rhs)
+        ),
+        BinaryOpTypeError::InvalidPlusOperands => format!(
+            "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got ({}, {})",
+            analysis.display(lhs),
+            analysis.display(rhs)
+        ),
+        BinaryOpTypeError::RequiresBitwiseNumberPair => format!(
+            "bitwise operator `{op}` requires (number, number), got ({}, {})",
+            analysis.display(lhs),
+            analysis.display(rhs)
+        ),
+        BinaryOpTypeError::RequiresStringAndObject => format!(
+            "operator `in` requires (string, object), got ({}, {})",
+            analysis.display(lhs),
+            analysis.display(rhs)
+        ),
+    }
+}
+
+fn render_unary_op_mismatch(
+    operand: Ty,
+    op: &str,
+    reason: UnaryOpTypeError,
+    analysis: &TypeAnalysis,
+) -> String {
+    match reason {
+        UnaryOpTypeError::NotRequiresBoolean => {
+            format!(
+                "operator `{op}` requires boolean, got {}",
+                analysis.display(operand)
+            )
+        }
+        UnaryOpTypeError::MinusRequiresNumber | UnaryOpTypeError::BitNotRequiresNumber => {
+            format!(
+                "operator `{op}` requires number, got {}",
+                analysis.display(operand)
+            )
+        }
+    }
+}
+
+/// Configuration for type checking.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TypeCheckRule { + BinaryOps, + UnaryOps, + FieldAccess, + IndexAccess, + CallChecks, +} + +impl TypeCheckRule { + const fn bit(self) -> u8 { + match self { + Self::BinaryOps => 1 << 0, + Self::UnaryOps => 1 << 1, + Self::FieldAccess => 1 << 2, + Self::IndexAccess => 1 << 3, + Self::CallChecks => 1 << 4, + } + } +} + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct TypeCheckConfig { + enabled: u8, +} + +impl TypeCheckConfig { + /// Enable a single type-check rule in this config. + #[must_use] + pub fn with_enabled(mut self, rule: TypeCheckRule) -> Self { + self.enable(rule); + self + } + + /// Enable a single type-check rule in this config. + pub fn enable(&mut self, rule: TypeCheckRule) { + self.enabled |= rule.bit(); + } + + /// Check whether a type-check rule is enabled. + #[must_use] + pub fn is_enabled(&self, rule: TypeCheckRule) -> bool { + self.enabled & rule.bit() != 0 + } + + /// Create a config with all checks enabled. 
+ #[must_use] + pub fn all() -> Self { + Self { + enabled: TypeCheckRule::BinaryOps.bit() + | TypeCheckRule::UnaryOps.bit() + | TypeCheckRule::FieldAccess.bit() + | TypeCheckRule::IndexAccess.bit() + | TypeCheckRule::CallChecks.bit(), + } + } +} diff --git a/crates/jrsonnet-lsp-document/Cargo.toml b/crates/jrsonnet-lsp-document/Cargo.toml new file mode 100644 index 00000000..f9dcf648 --- /dev/null +++ b/crates/jrsonnet-lsp-document/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "jrsonnet-lsp-document" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Document parsing, position conversion, and AST utilities for jrsonnet LSP" + +[dependencies] +derive_more = { version = "1", features = ["full"] } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +lsp-types.workspace = true +parking_lot.workspace = true +rowan.workspace = true +thiserror.workspace = true +url.workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-document/src/ast_utils.rs b/crates/jrsonnet-lsp-document/src/ast_utils.rs new file mode 100644 index 00000000..1288a3c5 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/ast_utils.rs @@ -0,0 +1,134 @@ +//! Shared AST utility functions. +//! +//! This module provides common utility functions for working with the Jsonnet AST +//! that are used across multiple handlers. + +use jrsonnet_rowan_parser::{SyntaxNode, SyntaxToken}; +use lsp_types::Range; +use rowan::TextRange; + +use crate::{ByteOffset, LineIndex, LspPosition}; + +/// Strip quotes from a string literal, handling all Jsonnet string formats. 
+///
+/// Handles:
+/// - Double-quoted: `"foo"` → `foo`
+/// - Single-quoted: `'foo'` → `foo`
+/// - Verbatim double: `@"foo"` → `foo`
+/// - Verbatim single: `@'foo'` → `foo`
+#[must_use]
+pub fn strip_string_quotes(s: &str) -> String {
+    s.trim_start_matches('@')
+        .trim_start_matches('"')
+        .trim_start_matches('\'')
+        .trim_end_matches('"')
+        .trim_end_matches('\'')
+        .to_string()
+}
+
+/// Find the token at the given byte offset, preferring the rightmost token
+/// when the offset is between two tokens.
+#[must_use]
+pub fn token_at_offset(root: &SyntaxNode, offset: ByteOffset) -> Option<SyntaxToken> {
+    root.token_at_offset(rowan::TextSize::from(u32::from(offset)))
+        .right_biased()
+}
+
+/// Convert a rowan `TextRange` to an LSP `Range`.
+///
+/// This handles the conversion from byte offsets to LSP positions
+/// (line number and UTF-16 character offset).
+#[must_use]
+pub fn to_lsp_range(range: TextRange, line_index: &LineIndex, text: &str) -> Range {
+    let start = line_index
+        .position(range.start().into(), text)
+        .unwrap_or_default();
+    let end = line_index
+        .position(range.end().into(), text)
+        .unwrap_or_default();
+
+    Range {
+        start: start.into(),
+        end: end.into(),
+    }
+}
+
+/// Find the deepest node containing the given offset.
+///
+/// This is useful when the cursor is at whitespace or between tokens,
+/// where `token_at_offset` would return `None`.
+#[must_use]
+pub fn find_node_at_offset(root: &SyntaxNode, offset: ByteOffset) -> Option<SyntaxNode> {
+    let text_size = rowan::TextSize::from(u32::from(offset));
+
+    // Find the deepest node that contains this offset
+    let mut result = None;
+    for node in root.descendants() {
+        if node.text_range().contains_inclusive(text_size) {
+            result = Some(node);
+        }
+    }
+    result
+}
+
+/// Convert an LSP position to a byte offset in the document.
+///
+/// Returns `None` if the position is invalid.
+#[must_use] +pub fn position_to_offset( + line_index: &LineIndex, + position: LspPosition, + text: &str, +) -> Option { + line_index.offset(position, text) +} + +#[cfg(test)] +mod tests { + use jrsonnet_rowan_parser::AstNode; + + use super::*; + use crate::{DocVersion, Document}; + + #[test] + fn test_token_at_offset() { + let code = "local x = 1;"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Token at 'x' (offset 6) + let token = token_at_offset(ast.syntax(), ByteOffset::from(6u32)) + .expect("should find token at offset 6"); + assert_eq!(token.text(), "x"); + } + + #[test] + fn test_to_lsp_range() { + let code = "local x = 1;"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let line_index = doc.line_index(); + + // Range for "local" + let range = TextRange::new(0.into(), 5.into()); + let lsp_range = to_lsp_range(range, line_index, code); + + assert_eq!(lsp_range.start.line, 0); + assert_eq!(lsp_range.start.character, 0); + assert_eq!(lsp_range.end.line, 0); + assert_eq!(lsp_range.end.character, 5); + } + + #[test] + fn test_strip_string_quotes() { + // Double-quoted strings + assert_eq!(strip_string_quotes(r#""foo.jsonnet""#), "foo.jsonnet"); + // Single-quoted strings + assert_eq!(strip_string_quotes("'bar.jsonnet'"), "bar.jsonnet"); + // Verbatim double-quoted strings + assert_eq!(strip_string_quotes(r#"@"baz.jsonnet""#), "baz.jsonnet"); + // Verbatim single-quoted strings + assert_eq!(strip_string_quotes("@'qux.jsonnet'"), "qux.jsonnet"); + // No quotes (edge case) + assert_eq!(strip_string_quotes("raw"), "raw"); + } +} diff --git a/crates/jrsonnet-lsp-document/src/config.rs b/crates/jrsonnet-lsp-document/src/config.rs new file mode 100644 index 00000000..df1a38f2 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/config.rs @@ -0,0 +1,12 @@ +//! Configuration constants for LSP analysis. +//! +//! Centralizes cache capacities and other tunable parameters. 
+ +/// Default capacity for the closed document cache in `DocumentManager`. +pub const DEFAULT_CLOSED_CACHE_CAPACITY: usize = 100; + +/// Default capacity for the type analysis cache in `DocumentManager`. +pub const DEFAULT_ANALYSIS_CACHE_CAPACITY: usize = 100; + +/// Default capacity for the shared type cache. +pub const DEFAULT_TYPE_CACHE_CAPACITY: usize = 500; diff --git a/crates/jrsonnet-lsp-document/src/document.rs b/crates/jrsonnet-lsp-document/src/document.rs new file mode 100644 index 00000000..c9687e94 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/document.rs @@ -0,0 +1,519 @@ +//! Document representation and parsing. +//! +//! Wraps the jrsonnet-rowan-parser to provide error-tolerant AST +//! and associated metadata for IDE features. + +use std::{collections::HashSet, ops::Deref, sync::Arc}; + +/// Re-export the parser's syntax error type for use in LSP. +pub use jrsonnet_rowan_parser::LocatedSyntaxError as SyntaxError; +use jrsonnet_rowan_parser::{nodes::SourceFile, GreenNode}; + +use crate::{position::LineIndex, types::DocVersion}; + +/// A parsed Jsonnet document containing the AST and any syntax errors. +/// +/// Uses error-tolerant parsing to produce a partial AST even when the +/// document contains syntax errors. This enables IDE features to work +/// on incomplete or invalid code. +/// +/// Stores a `GreenNode` (thread-safe) and creates `SourceFile` on demand. +/// This allows `ParsedDocument` to be `Send + Sync` for parallel processing. +#[derive(Debug, Clone)] +pub struct ParsedDocument { + green: GreenNode, + errors: Arc<[SyntaxError]>, +} + +impl ParsedDocument { + /// Parse a document from source text. + #[must_use] + pub fn parse(text: &str) -> Self { + let (green, errors) = jrsonnet_rowan_parser::parse_green(text); + Self { + green, + errors: errors.into(), + } + } + + /// Get the AST. Creates a fresh `SourceFile` cursor on each call. 
+ #[must_use] + pub fn ast(&self) -> SourceFile { + jrsonnet_rowan_parser::source_file_from_green(&self.green) + } + + /// Get syntax errors. + #[must_use] + pub fn errors(&self) -> &[SyntaxError] { + &self.errors + } + + /// Check if the document has any syntax errors. + #[must_use] + pub fn has_errors(&self) -> bool { + !self.errors.is_empty() + } +} + +/// A document with all computed artifacts. +/// +/// This is the cached representation of an open document, +/// containing the source text and computed metadata. +/// Cloning is cheap and shares the underlying data. +#[derive(Debug, Clone)] +pub struct Document { + /// The source text of the document. + text: Arc, + /// Document version from the editor. + version: DocVersion, + /// Parsed AST and errors. + parsed: ParsedDocument, + /// Line index for position conversion. + line_index: Arc, + /// Last successful parse for graceful degradation. + /// Used when current parse has errors. + last_good_parse: Option, + /// Last good line index (corresponding to `last_good_parse`). + last_good_line_index: Option>, + /// Lines that have changed since `last_good_parse`. + /// If None, no tracking is active (current parse is good). + dirty_lines: Option>, +} + +impl Document { + /// Create a new document from source text. + #[must_use] + pub fn new(text: String, version: DocVersion) -> Self { + let line_index = Arc::new(LineIndex::new(&text)); + let parsed = ParsedDocument::parse(&text); + Self { + text: text.into(), + version, + parsed, + line_index, + last_good_parse: None, + last_good_line_index: None, + dirty_lines: None, + } + } + + /// Get the source text. + #[must_use] + pub fn text(&self) -> &str { + &self.text + } + + /// Get the document version. + #[must_use] + pub fn version(&self) -> DocVersion { + self.version + } + + /// Get the line index. + #[must_use] + pub fn line_index(&self) -> &LineIndex { + &self.line_index + } + + /// Update the document with new text (full sync). 
+ /// + /// This replaces the document content and re-parses. + /// Due to Arc, any clones will continue to reference the old data. + pub fn update(&mut self, text: String, version: DocVersion) { + // Save the current state if it's error-free (for graceful degradation) + if !self.parsed.has_errors() { + self.last_good_parse = Some(self.parsed.clone()); + self.last_good_line_index = Some(self.line_index.clone()); + } + + self.line_index = Arc::new(LineIndex::new(&text)); + self.parsed = ParsedDocument::parse(&text); + self.text = text.into(); + self.version = version; + + // Update dirty line tracking + if self.parsed.has_errors() { + // Full replacement means all lines are potentially dirty + let line_count = self.line_index.line_count(); + self.dirty_lines = Some((0..line_count).collect()); + } else { + // Parse succeeded, clear dirty tracking + self.dirty_lines = None; + self.last_good_parse = None; + self.last_good_line_index = None; + } + } + + /// Apply an incremental change to the document. + /// + /// Takes an LSP range and new text, applies the change, and re-parses. + /// Returns true if the change was applied successfully. 
+ pub fn apply_incremental_change( + &mut self, + range: lsp_types::Range, + new_text: &str, + version: DocVersion, + ) -> bool { + use crate::types::LspPosition; + + // Save the current state if it's error-free (for graceful degradation) + if !self.parsed.has_errors() { + self.last_good_parse = Some(self.parsed.clone()); + self.last_good_line_index = Some(self.line_index.clone()); + self.dirty_lines = Some(HashSet::new()); + } + + // Convert LSP range to byte offsets + let start_pos = LspPosition::from(range.start); + let end_pos = LspPosition::from(range.end); + + let start_offset = match self.line_index.offset(start_pos, &self.text) { + Some(o) => usize::from(o), + None => return false, + }; + let end_offset = match self.line_index.offset(end_pos, &self.text) { + Some(o) => usize::from(o), + None => return false, + }; + + // Validate offsets + if start_offset > end_offset || end_offset > self.text.len() { + return false; + } + + // Track which lines are affected by this change + let start_line = range.start.line; + let end_line = range.end.line; + let new_line_count = u32::try_from(new_text.matches('\n').count()).unwrap_or(u32::MAX); + let affected_lines = end_line.saturating_sub(start_line) + new_line_count + 1; + + // Apply the text change + let mut text = self.text.to_string(); + text.replace_range(start_offset..end_offset, new_text); + + // Rebuild + self.line_index = Arc::new(LineIndex::new(&text)); + self.parsed = ParsedDocument::parse(&text); + self.text = text.into(); + self.version = version; + + // Update dirty line tracking + if self.parsed.has_errors() { + // Mark affected lines as dirty + if let Some(ref mut dirty) = self.dirty_lines { + for line in start_line..start_line.saturating_add(affected_lines) { + dirty.insert(line); + } + } + } else { + // Parse succeeded, clear dirty tracking + self.dirty_lines = None; + self.last_good_parse = None; + self.last_good_line_index = None; + } + + true + } + + /// Get the AST for navigation purposes. 
+ /// + /// Returns the current parse if successful, otherwise falls back to + /// the last good parse for graceful degradation on broken files. + #[must_use] + pub fn navigation_ast(&self) -> SourceFile { + if self.parsed.has_errors() { + if let Some(ref last_good) = self.last_good_parse { + return last_good.ast(); + } + } + self.parsed.ast() + } + + /// Get the line index for navigation purposes. + /// + /// Returns the current line index if parse is successful, otherwise + /// falls back to the last good line index. + #[must_use] + pub fn navigation_line_index(&self) -> &LineIndex { + if self.parsed.has_errors() { + if let Some(ref last_good) = self.last_good_line_index { + return last_good; + } + } + &self.line_index + } + + /// Check if a position is in a dirty (recently changed) region. + /// + /// Returns true if the line at the given position has been modified + /// since the last successful parse. + #[must_use] + pub fn is_position_dirty(&self, line: u32) -> bool { + self.dirty_lines.as_ref().is_some_and(|d| d.contains(&line)) + } + + /// Check if there are any dirty lines (broken state with pending changes). + #[must_use] + pub fn has_dirty_lines(&self) -> bool { + self.dirty_lines.as_ref().is_some_and(|d| !d.is_empty()) + } + + /// Get the set of dirty line numbers. + #[must_use] + pub fn dirty_lines(&self) -> Option<&HashSet> { + self.dirty_lines.as_ref() + } +} + +impl Deref for Document { + type Target = ParsedDocument; + + fn deref(&self) -> &Self::Target { + &self.parsed + } +} + +/// Thread-safe document wrapper using Arc. 
+pub type SharedDocument = std::sync::Arc<Document>;
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_parse_valid_document() {
+        let text = r#"{ hello: "world" }"#;
+        let doc = Document::new(text.to_string(), DocVersion::new(1));
+
+        // Methods from ParsedDocument are accessed via Deref
+        assert_eq!(doc.errors(), &[], "valid document should have no errors");
+        assert_eq!(doc.version(), DocVersion::new(1));
+    }
+
+    #[test]
+    fn test_parse_invalid_document() {
+        use jrsonnet_rowan_parser::{ExpectedSyntax, SyntaxError as ParserSyntaxError};
+        use rowan::TextRange;
+
+        let text = r"{ hello: }"; // Missing value after colon
+        let doc = Document::new(text.to_string(), DocVersion::new(1));
+
+        // Methods from ParsedDocument are accessed via Deref
+        assert_eq!(
+            doc.errors(),
+            &[SyntaxError {
+                error: ParserSyntaxError::Missing {
+                    expected: ExpectedSyntax::Named("expression")
+                },
+                range: TextRange::new(9.into(), 9.into()),
+            }]
+        );
+    }
+
+    #[test]
+    fn test_document_update() {
+        let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1));
+        assert_eq!(doc.version(), DocVersion::new(1));
+
+        doc.update("{ a: 2 }".to_string(), DocVersion::new(2));
+        assert_eq!(doc.version(), DocVersion::new(2));
+    }
+
+    #[test]
+    fn test_incremental_change_insert() {
+        let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1));
+
+        // Insert text at position (0, 7) - before the closing brace
+        let range = lsp_types::Range {
+            start: lsp_types::Position {
+                line: 0,
+                character: 6,
+            },
+            end: lsp_types::Position {
+                line: 0,
+                character: 6,
+            },
+        };
+        let success = doc.apply_incremental_change(range, ", b: 2", DocVersion::new(2));
+
+        assert!(success);
+        assert_eq!(doc.text(), "{ a: 1, b: 2 }");
+        assert_eq!(doc.version(), DocVersion::new(2));
+    }
+
+    #[test]
+    fn test_incremental_change_replace() {
+        let mut doc = Document::new("{ hello: 1 }".to_string(), DocVersion::new(1));
+
+        // Replace "hello" with "world"
+        let range = lsp_types::Range {
start: lsp_types::Position { + line: 0, + character: 2, + }, + end: lsp_types::Position { + line: 0, + character: 7, + }, + }; + let success = doc.apply_incremental_change(range, "world", DocVersion::new(2)); + + assert!(success); + assert_eq!(doc.text(), "{ world: 1 }"); + } + + #[test] + fn test_incremental_change_delete() { + let mut doc = Document::new("{ a: 1, b: 2 }".to_string(), DocVersion::new(1)); + + // Delete ", b: 2" + let range = lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6, + }, + end: lsp_types::Position { + line: 0, + character: 12, + }, + }; + let success = doc.apply_incremental_change(range, "", DocVersion::new(2)); + + assert!(success); + assert_eq!(doc.text(), "{ a: 1 }"); + } + + #[test] + fn test_incremental_change_multiline() { + let mut doc = Document::new("{\n a: 1\n}".to_string(), DocVersion::new(1)); + + // Insert a new field on line 2 + let range = lsp_types::Range { + start: lsp_types::Position { + line: 1, + character: 6, + }, + end: lsp_types::Position { + line: 1, + character: 6, + }, + }; + let success = doc.apply_incremental_change(range, ",\n b: 2", DocVersion::new(2)); + + assert!(success); + assert_eq!(doc.text(), "{\n a: 1,\n b: 2\n}"); + } + + #[test] + fn test_incremental_change_invalid_range() { + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Invalid range: start after end + let range = lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 5, + }, + end: lsp_types::Position { + line: 0, + character: 2, + }, + }; + let success = doc.apply_incremental_change(range, "test", DocVersion::new(2)); + + assert!(!success); + // Document should be unchanged + assert_eq!(doc.text(), "{ a: 1 }"); + assert_eq!(doc.version(), DocVersion::new(1)); + } + + #[test] + fn test_incremental_change_out_of_bounds() { + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Out of bounds line + let range = lsp_types::Range { + start: 
lsp_types::Position { + line: 10, + character: 0, + }, + end: lsp_types::Position { + line: 10, + character: 5, + }, + }; + let success = doc.apply_incremental_change(range, "test", DocVersion::new(2)); + + assert!(!success); + assert_eq!(doc.text(), "{ a: 1 }"); + } + + #[test] + fn test_graceful_degradation_on_syntax_error() { + use jrsonnet_rowan_parser::AstNode; + + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Initially should have no errors + assert!(!doc.has_errors()); + assert!(!doc.has_dirty_lines()); + + // Introduce a syntax error + doc.update("{ a: }".to_string(), DocVersion::new(2)); + + // Should have errors now + assert!(doc.has_errors()); + assert!(doc.has_dirty_lines()); + + // Should have a fallback AST + let nav_ast = doc.navigation_ast(); + // The fallback AST should still be usable (from the original good parse) + // and be different from the current broken AST + assert!(!doc.ast().syntax().text().to_string().is_empty()); + assert!(!nav_ast.syntax().text().to_string().is_empty()); + } + + #[test] + fn test_graceful_degradation_recovery() { + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Introduce a syntax error + doc.update("{ a: }".to_string(), DocVersion::new(2)); + assert!(doc.has_errors()); + assert!(doc.has_dirty_lines()); + + // Fix the error + doc.update("{ a: 2 }".to_string(), DocVersion::new(3)); + + // Should no longer have errors or dirty lines + assert!(!doc.has_errors()); + assert!(!doc.has_dirty_lines()); + } + + #[test] + fn test_dirty_line_tracking() { + let mut doc = Document::new("{\n a: 1\n}".to_string(), DocVersion::new(1)); + + // Make an incremental change that causes an error + let range = lsp_types::Range { + start: lsp_types::Position { + line: 1, + character: 5, + }, + end: lsp_types::Position { + line: 1, + character: 6, + }, + }; + // Delete the "1" leaving "{ a: }" + doc.apply_incremental_change(range, "", DocVersion::new(2)); + + // Should have 
errors (incomplete expression) + assert!(doc.has_errors()); + + // Line 1 should be dirty + assert!(doc.is_position_dirty(1)); + // Line 0 should not be dirty + assert!(!doc.is_position_dirty(0)); + } +} diff --git a/crates/jrsonnet-lsp-document/src/error.rs b/crates/jrsonnet-lsp-document/src/error.rs new file mode 100644 index 00000000..a2c9863d --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/error.rs @@ -0,0 +1,229 @@ +//! Error types for the LSP server. +//! +//! Provides structured error types for better error handling and diagnostics. + +use thiserror::Error; + +/// Structured reasons why a Jsonnet identifier is invalid. +#[derive(Error, Debug, Clone, PartialEq, Eq)] +pub enum InvalidIdentifierReason { + #[error("identifier cannot be empty")] + Empty, + #[error("identifier must start with a letter or underscore, got '{0}'")] + InvalidStart(char), + #[error("identifier contains invalid character '{0}'")] + InvalidCharacter(char), + #[error("'{0}' is a reserved keyword")] + ReservedKeyword(String), +} + +/// Errors that can occur during LSP operations. +#[derive(Error, Debug)] +pub enum LspError { + /// Document was not found in the document manager. + #[error("document not found: {0}")] + DocumentNotFound(String), + + /// Position is invalid (e.g., line or character out of bounds). + #[error("invalid position: line {line}, character {character}")] + InvalidPosition { line: u32, character: u32 }, + + /// Byte offset is out of bounds for the document. + #[error("position out of bounds: offset {0}")] + PositionOutOfBounds(u32), + + /// No token found at the given position. + #[error("no token at position")] + NoTokenAtPosition, + + /// URI could not be parsed or converted to a path. + #[error("invalid URI: {0}")] + InvalidUri(String), + + /// URI is valid but is not a file URI. + #[error("URI is not a file URI: {0}")] + NonFileUri(String), + + /// File URI could not be converted to a filesystem path. 
+ #[error("failed to convert URI to path: {0}")] + UriToPath(String), + + /// Filesystem path could not be converted to a URI. + #[error("failed to convert path to URI: {0}")] + PathToUri(String), + + /// Identifier is not valid for Jsonnet. + #[error("invalid identifier: {0}")] + InvalidIdentifier(InvalidIdentifierReason), + + /// IO error occurred. + #[error("IO error: {0}")] + Io(#[from] std::io::Error), +} + +/// Result type for LSP operations. +pub type LspResult = Result; + +/// Result type for handler functions that may return no result. +/// +/// This is used for handlers where `None` is a valid response (e.g., no hover info available) +/// but we also want to distinguish from actual errors. +pub type HandlerResult = Result, LspError>; + +/// Jsonnet language keywords that cannot be used as identifiers. +const JSONNET_KEYWORDS: &[&str] = &[ + "assert", + "else", + "error", + "false", + "for", + "function", + "if", + "import", + "importbin", + "importstr", + "in", + "local", + "null", + "self", + "super", + "tailstrict", + "then", + "true", +]; + +/// Check if a string is a valid Jsonnet identifier. +/// +/// Valid identifiers: +/// - Start with a letter (a-z, A-Z) or underscore +/// - Contain only letters, digits, and underscores +/// - Are not Jsonnet keywords +#[must_use] +pub fn is_valid_jsonnet_identifier(name: &str) -> bool { + if name.is_empty() { + return false; + } + + // Check first character + let mut chars = name.chars(); + let Some(first) = chars.next() else { + return false; + }; + if !first.is_ascii_alphabetic() && first != '_' { + return false; + } + + // Check remaining characters + for c in chars { + if !c.is_ascii_alphanumeric() && c != '_' { + return false; + } + } + + // Check not a keyword + !JSONNET_KEYWORDS.contains(&name) +} + +/// Validate an identifier for renaming operations. +/// +/// Returns `Ok(())` if valid, or an error describing why it's invalid. 
+/// +/// # Errors +/// Returns `Err(LspError::InvalidIdentifier)` when the identifier is empty, +/// starts with an invalid character, contains invalid characters, or is a keyword. +pub fn validate_identifier(name: &str) -> LspResult<()> { + if name.is_empty() { + return Err(LspError::InvalidIdentifier(InvalidIdentifierReason::Empty)); + } + + let mut chars = name.chars(); + let Some(first) = chars.next() else { + return Err(LspError::InvalidIdentifier(InvalidIdentifierReason::Empty)); + }; + if !first.is_ascii_alphabetic() && first != '_' { + return Err(LspError::InvalidIdentifier( + InvalidIdentifierReason::InvalidStart(first), + )); + } + + for c in chars { + if !c.is_ascii_alphanumeric() && c != '_' { + return Err(LspError::InvalidIdentifier( + InvalidIdentifierReason::InvalidCharacter(c), + )); + } + } + + if JSONNET_KEYWORDS.contains(&name) { + return Err(LspError::InvalidIdentifier( + InvalidIdentifierReason::ReservedKeyword(name.to_string()), + )); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + #[test] + fn test_valid_identifiers() { + assert!(is_valid_jsonnet_identifier("foo")); + assert!(is_valid_jsonnet_identifier("_foo")); + assert!(is_valid_jsonnet_identifier("foo123")); + assert!(is_valid_jsonnet_identifier("_")); + assert!(is_valid_jsonnet_identifier("camelCase")); + assert!(is_valid_jsonnet_identifier("snake_case")); + assert!(is_valid_jsonnet_identifier("SCREAMING_SNAKE")); + } + + #[test] + fn test_invalid_identifiers() { + assert!(!is_valid_jsonnet_identifier("")); + assert!(!is_valid_jsonnet_identifier("123foo")); + assert!(!is_valid_jsonnet_identifier("foo-bar")); + assert!(!is_valid_jsonnet_identifier("foo.bar")); + assert!(!is_valid_jsonnet_identifier("foo bar")); + } + + #[test] + fn test_keywords_are_invalid() { + assert!(!is_valid_jsonnet_identifier("local")); + assert!(!is_valid_jsonnet_identifier("function")); + assert!(!is_valid_jsonnet_identifier("if")); + 
assert!(!is_valid_jsonnet_identifier("then")); + assert!(!is_valid_jsonnet_identifier("else")); + assert!(!is_valid_jsonnet_identifier("true")); + assert!(!is_valid_jsonnet_identifier("false")); + assert!(!is_valid_jsonnet_identifier("null")); + assert!(!is_valid_jsonnet_identifier("self")); + assert!(!is_valid_jsonnet_identifier("super")); + } + + #[test] + fn test_validate_identifier_errors() { + validate_identifier("foo").expect("foo should be valid"); + + let err = validate_identifier("").expect_err("empty identifier should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::Empty) + ); + + let err = validate_identifier("123foo").expect_err("leading digit should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::InvalidStart('1')) + ); + + let err = validate_identifier("local").expect_err("keyword should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::ReservedKeyword(keyword)) + if keyword == "local" + ); + } +} diff --git a/crates/jrsonnet-lsp-document/src/file_ids.rs b/crates/jrsonnet-lsp-document/src/file_ids.rs new file mode 100644 index 00000000..c1f0728e --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/file_ids.rs @@ -0,0 +1,215 @@ +//! File identity and path interning utilities. + +use std::{collections::HashMap, path::PathBuf, sync::Arc}; + +use parking_lot::RwLock; + +use crate::CanonicalPath; + +/// Stable identifier for an interned file path. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct FileId(u32); + +impl FileId { + /// Create a file id from a raw integer. + #[must_use] + fn from_raw(raw: u32) -> Self { + Self(raw) + } + + /// Get the underlying integer representation. + #[must_use] + pub fn as_raw(self) -> u32 { + self.0 + } + + /// Get this id as a vector index. 
+ #[must_use] + pub fn as_usize(self) -> usize { + self.0 as usize + } +} + +impl std::fmt::Display for FileId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// Bidirectional interner between canonical paths and stable file identifiers. +#[derive(Debug, Clone, Default)] +struct PathInterner { + path_to_id: HashMap, + id_to_path: Vec>, +} + +impl PathInterner { + /// Get an interned id for `path`, inserting it if needed. + pub fn intern(&mut self, path: &CanonicalPath) -> FileId { + if let Some(&id) = self.path_to_id.get(path.as_path()) { + return id; + } + + let raw = self.id_to_path.len() as u32; + let id = FileId::from_raw(raw); + self.path_to_id.insert(path.as_path().to_path_buf(), id); + self.id_to_path.push(Arc::new(path.clone())); + id + } + + /// Get an existing id for `path`. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.path_to_id.get(path.as_path()).copied() + } + + /// Resolve an interned id to its canonical path. + #[must_use] + pub fn path(&self, file: FileId) -> Option<&Arc> { + self.id_to_path.get(file.as_usize()) + } +} + +/// Read-only resolver for a shared interned path store. +#[derive(Debug, Clone)] +pub struct PathResolver { + interner: Arc>, +} + +impl PathResolver { + /// Resolve a canonical path to an already interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.interner.read().file(path) + } + + /// Borrow an interned file identifier's canonical path. + #[must_use] + pub fn path(&self, file: FileId) -> Option> { + self.interner.read().path(file).cloned() + } +} + +/// Thread-safe shared store for canonical paths and stable file ids. +#[derive(Debug, Clone, Default)] +pub struct PathStore { + interner: Arc>, +} + +impl PathStore { + /// Create an empty shared store. + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Get an interned id for `path`, inserting it if needed. 
+ #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.interner.write().intern(path) + } + + /// Get a read-only resolver for already interned paths. + #[must_use] + pub fn resolver(&self) -> PathResolver { + PathResolver { + interner: Arc::clone(&self.interner), + } + } + + /// Borrow an interned file identifier's canonical path. + #[must_use] + pub fn path(&self, file: FileId) -> Option> { + self.interner.read().path(file).cloned() + } + + /// Returns `true` when two stores share the same underlying interner. + #[must_use] + pub fn shares_interner_with(&self, other: &Self) -> bool { + Arc::ptr_eq(&self.interner, &other.interner) + } +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::*; + + fn path(name: &str) -> CanonicalPath { + CanonicalPath::new(PathBuf::from(format!("/test/{name}.jsonnet"))) + } + + #[test] + fn test_path_interner_reuses_id_for_same_path() { + let mut interner = PathInterner::default(); + let alpha = path("alpha"); + + let first = interner.intern(&alpha); + let second = interner.intern(&alpha); + + assert_eq!(first, second); + assert_eq!(interner.file(&alpha), Some(first)); + assert_eq!(interner.path(first).map(Arc::as_ref), Some(&alpha)); + } + + #[test] + fn test_path_interner_distinguishes_paths() { + let mut interner = PathInterner::default(); + let alpha = path("alpha"); + let beta = path("beta"); + + let alpha_id = interner.intern(&alpha); + let beta_id = interner.intern(&beta); + + assert_eq!(alpha_id, FileId::from_raw(0)); + assert_eq!(beta_id, FileId::from_raw(1)); + assert_eq!(interner.path(alpha_id).map(Arc::as_ref), Some(&alpha)); + assert_eq!(interner.path(beta_id).map(Arc::as_ref), Some(&beta)); + } + + #[test] + fn test_path_interner_unknown_lookup() { + let interner = PathInterner::default(); + let alpha = path("alpha"); + + assert_eq!(interner.file(&alpha), None); + assert_eq!(interner.path(FileId::from_raw(0)), None); + } + + #[test] + fn 
test_path_store_shares_interned_ids_across_clones() { + let store = PathStore::new(); + let other = store.clone(); + let alpha = path("alpha"); + + let id = store.intern(&alpha); + let resolver = other.resolver(); + assert_eq!(resolver.file(&alpha), Some(id)); + assert_eq!(resolver.path(id).as_deref(), Some(&alpha)); + } + + #[test] + fn test_path_store_path_borrows_without_cloning() { + let store = PathStore::new(); + let resolver = store.resolver(); + let alpha = path("alpha"); + let id = store.intern(&alpha); + + let first = resolver.path(id).expect("path should exist"); + let second = resolver.path(id).expect("path should exist"); + assert!(Arc::ptr_eq(&first, &second)); + assert_eq!(first.as_ref(), &alpha); + assert_eq!(second.as_ref(), &alpha); + } + + #[test] + fn test_path_store_shares_interner_identity() { + let store = PathStore::new(); + let clone = store.clone(); + let other = PathStore::new(); + + assert!(store.shares_interner_with(&clone)); + assert!(!store.shares_interner_with(&other)); + } +} diff --git a/crates/jrsonnet-lsp-document/src/lib.rs b/crates/jrsonnet-lsp-document/src/lib.rs new file mode 100644 index 00000000..1724c460 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/lib.rs @@ -0,0 +1,34 @@ +//! Document parsing, position conversion, and AST utilities for Jsonnet LSP. +//! +//! This crate provides the foundational types and utilities for working with +//! Jsonnet documents in an LSP context: +//! +//! - [`Document`] and [`ParsedDocument`] for parsed Jsonnet files +//! - [`LineIndex`] for efficient position conversion +//! - Position types ([`ByteOffset`], [`LspPosition`], [`LspRange`]) +//! 
- AST utilities for common operations + +pub mod ast_utils; +pub mod config; +pub mod document; +pub mod error; +pub mod file_ids; +pub mod position; +pub mod types; + +pub use ast_utils::{ + find_node_at_offset, position_to_offset, strip_string_quotes, to_lsp_range, token_at_offset, +}; +pub use config::{ + DEFAULT_ANALYSIS_CACHE_CAPACITY, DEFAULT_CLOSED_CACHE_CAPACITY, DEFAULT_TYPE_CACHE_CAPACITY, +}; +pub use document::{Document, ParsedDocument, SharedDocument, SyntaxError}; +pub use error::{ + is_valid_jsonnet_identifier, validate_identifier, HandlerResult, LspError, LspResult, +}; +pub use file_ids::{FileId, PathResolver, PathStore}; +pub use position::LineIndex; +pub use types::{ + ByteOffset, CanonicalPath, CharOffset, DocVersion, Line, LspPosition, LspRange, SymbolName, + Utf16Offset, +}; diff --git a/crates/jrsonnet-lsp-document/src/position.rs b/crates/jrsonnet-lsp-document/src/position.rs new file mode 100644 index 00000000..dfef25de --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/position.rs @@ -0,0 +1,388 @@ +//! Line index for efficient position conversion. +//! +//! LSP uses UTF-16 code units for character positions, while Rust strings +//! and rowan use byte offsets. This module provides efficient conversion +//! between the two coordinate systems. + +use crate::types::{ByteOffset, CharOffset, Line, LspPosition, LspRange}; + +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} + +/// Line index - stores offsets only, no string copies. +/// +/// This allows O(1) line lookup and `O(line_length)` character offset conversion. +#[derive(Debug, Clone)] +pub struct LineIndex { + /// Byte offset of each line start (including line 0 at offset 0). + line_starts: Vec, +} + +impl LineIndex { + /// Build from source text - O(n) single pass, one allocation. 
+ #[must_use] + pub fn new(text: &str) -> Self { + let mut line_starts = vec![ByteOffset(0)]; + + for (i, ch) in text.char_indices() { + if ch == '\n' { + line_starts.push(ByteOffset(to_u32(i + 1))); + } + } + + Self { line_starts } + } + + /// Get the number of lines in the document. + #[must_use] + pub fn line_count(&self) -> u32 { + to_u32(self.line_starts.len()) + } + + /// Get the byte offset of a line start. + #[must_use] + pub fn line_start(&self, line: Line) -> Option { + self.line_starts.get(line.0 as usize).copied() + } + + /// Get the line number for a byte offset. + #[must_use] + pub fn line_of_offset(&self, offset: ByteOffset) -> Line { + // Binary search for the line containing this offset + match self.line_starts.binary_search(&offset) { + Ok(line) => Line(to_u32(line)), + Err(line) => Line(to_u32(line.saturating_sub(1))), + } + } + + /// Convert LSP position to byte offset. + /// + /// Returns None if the position is out of bounds. + #[must_use] + pub fn offset(&self, pos: LspPosition, text: &str) -> Option { + let line_start = self.line_start(pos.line)?; + let line_start_usize: usize = line_start.into(); + + // Find the end of this line (exclude newline) + let next_line = Line(pos.line.0 + 1); + let line_end = self + .line_start(next_line) + .map_or(text.len(), |o| usize::from(o).saturating_sub(1)); + + let line_text = text.get(line_start_usize..line_end)?; + + // Walk the line, counting UTF-16 code units + let mut utf16_count = 0u32; + for (byte_idx, ch) in line_text.char_indices() { + if utf16_count >= pos.character.0 { + return Some(ByteOffset(to_u32(line_start_usize + byte_idx))); + } + utf16_count += to_u32(ch.len_utf16()); + } + + // Position is at or past end of line + Some(ByteOffset(to_u32(line_start_usize + line_text.len()))) + } + + /// Convert byte offset to LSP position. + /// + /// Returns None if the offset is out of bounds. 
+ #[must_use] + pub fn position(&self, offset: ByteOffset, text: &str) -> Option { + let offset_usize: usize = offset.into(); + if offset_usize > text.len() { + return None; + } + + let line = self.line_of_offset(offset); + let line_start: usize = self.line_start(line)?.into(); + + // Count UTF-16 code units from line start to offset + let line_prefix = text.get(line_start..offset_usize)?; + let character: u32 = line_prefix.chars().map(|ch| to_u32(ch.len_utf16())).sum(); + + Some(LspPosition { + line, + character: CharOffset(character), + }) + } + + /// Convert a rowan `TextRange` to an LSP Range. + #[must_use] + pub fn range(&self, range: rowan::TextRange, text: &str) -> Option { + let start = self.position(range.start().into(), text)?; + let end = self.position(range.end().into(), text)?; + Some(LspRange { start, end }) + } + + /// Convert an LSP Range to a rowan `TextRange`. + #[must_use] + pub fn text_range(&self, range: LspRange, text: &str) -> Option { + let start = self.offset(range.start, text)?; + let end = self.offset(range.end, text)?; + Some(rowan::TextRange::new(start.into(), end.into())) + } + + /// Get the text of a specific line (without trailing newline). 
+ #[must_use] + pub fn line_text<'a>(&self, line: Line, text: &'a str) -> Option<&'a str> { + let start: usize = self.line_start(line)?.into(); + let next_line = Line(line.0 + 1); + let end = self + .line_start(next_line) + .map_or(text.len(), |o| usize::from(o).saturating_sub(1)); + text.get(start..end) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_line_index_basic() { + let text = "hello\nworld\n"; + let index = LineIndex::new(text); + + assert_eq!(index.line_count(), 3); // "hello", "world", "" + assert_eq!(index.line_start(Line(0)), Some(ByteOffset(0))); + assert_eq!(index.line_start(Line(1)), Some(ByteOffset(6))); + assert_eq!(index.line_start(Line(2)), Some(ByteOffset(12))); + } + + #[test] + fn test_line_of_offset() { + let text = "hello\nworld\n"; + let index = LineIndex::new(text); + + assert_eq!(index.line_of_offset(ByteOffset(0)), Line(0)); + assert_eq!(index.line_of_offset(ByteOffset(3)), Line(0)); + assert_eq!(index.line_of_offset(ByteOffset(5)), Line(0)); // 'o' in hello + assert_eq!(index.line_of_offset(ByteOffset(6)), Line(1)); // 'w' in world + assert_eq!(index.line_of_offset(ByteOffset(11)), Line(1)); // 'd' in world + } + + #[test] + fn test_lsp_position_to_offset() { + let text = "hello\nworld"; + let index = LineIndex::new(text); + + // Start of file + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(0) + }, + text + ), + Some(ByteOffset(0)) + ); + + // Middle of first line + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(3) + }, + text + ), + Some(ByteOffset(3)) + ); + + // Start of second line + assert_eq!( + index.offset( + LspPosition { + line: Line(1), + character: CharOffset(0) + }, + text + ), + Some(ByteOffset(6)) + ); + + // End of second line + assert_eq!( + index.offset( + LspPosition { + line: Line(1), + character: CharOffset(5) + }, + text + ), + Some(ByteOffset(11)) + ); + } + + #[test] + fn test_offset_to_lsp_position() { + let 
text = "hello\nworld"; + let index = LineIndex::new(text); + + assert_eq!( + index.position(ByteOffset(0), text), + Some(LspPosition { + line: Line(0), + character: CharOffset(0) + }) + ); + + assert_eq!( + index.position(ByteOffset(3), text), + Some(LspPosition { + line: Line(0), + character: CharOffset(3) + }) + ); + + assert_eq!( + index.position(ByteOffset(6), text), + Some(LspPosition { + line: Line(1), + character: CharOffset(0) + }) + ); + + assert_eq!( + index.position(ByteOffset(11), text), + Some(LspPosition { + line: Line(1), + character: CharOffset(5) + }) + ); + } + + #[test] + fn test_utf16_handling() { + // '🦀' is 4 bytes in UTF-8 but 2 UTF-16 code units + let text = "a🦀b"; + let index = LineIndex::new(text); + + // 'a' is at character 0 + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(0) + }, + text + ), + Some(ByteOffset(0)) + ); + + // '🦀' is at character 1 (UTF-16), byte offset 1 + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(1) + }, + text + ), + Some(ByteOffset(1)) + ); + + // 'b' is at character 3 (UTF-16: 1 for 'a' + 2 for '🦀'), byte offset 5 + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(3) + }, + text + ), + Some(ByteOffset(5)) + ); + + // Reverse: byte offset 5 -> character 3 + assert_eq!( + index.position(ByteOffset(5), text), + Some(LspPosition { + line: Line(0), + character: CharOffset(3) + }) + ); + } + + #[test] + fn test_line_text() { + let text = "hello\nworld\n"; + let index = LineIndex::new(text); + + assert_eq!(index.line_text(Line(0), text), Some("hello")); + assert_eq!(index.line_text(Line(1), text), Some("world")); + assert_eq!(index.line_text(Line(2), text), Some("")); + } + + #[test] + fn test_empty_file() { + let text = ""; + let index = LineIndex::new(text); + + assert_eq!(index.line_count(), 1); + assert_eq!(index.line_start(Line(0)), Some(ByteOffset(0))); + assert_eq!( + index.offset( + LspPosition { + 
line: Line(0), + character: CharOffset(0) + }, + text + ), + Some(ByteOffset(0)) + ); + } + + #[test] + fn test_crlf_line_endings() { + // Windows-style CRLF line endings + let text = "hello\r\nworld\r\n"; + let index = LineIndex::new(text); + + // Line text includes \r (we only split on \n) + let expected = LineIndex { + line_starts: vec![ByteOffset(0), ByteOffset(7), ByteOffset(14)], + }; + assert_eq!(index.line_starts, expected.line_starts); + } + + #[test] + fn test_cr_only_line_endings() { + // Old Mac-style CR-only line endings (rare) + // CR alone is NOT treated as a line ending + let text = "hello\rworld\r"; + let index = LineIndex::new(text); + + let expected = LineIndex { + line_starts: vec![ByteOffset(0)], + }; + assert_eq!(index.line_starts, expected.line_starts); + } + + #[test] + fn test_mixed_line_endings() { + // Mix of LF and CRLF: "line1\n" (6) + "line2\r\n" (7) + "line3\n" (6) + let text = "line1\nline2\r\nline3\n"; + let index = LineIndex::new(text); + + let expected = LineIndex { + line_starts: vec![ByteOffset(0), ByteOffset(6), ByteOffset(13), ByteOffset(19)], + }; + assert_eq!(index.line_starts, expected.line_starts); + } + + #[test] + fn test_lf_only_line_endings() { + // Unix-style LF-only (most common) + let text = "hello\nworld\n"; + let index = LineIndex::new(text); + + let expected = LineIndex { + line_starts: vec![ByteOffset(0), ByteOffset(6), ByteOffset(12)], + }; + assert_eq!(index.line_starts, expected.line_starts); + } +} diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs new file mode 100644 index 00000000..3d2ed897 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -0,0 +1,383 @@ +//! Domain-specific types for the LSP. +//! +//! These provide semantic clarity and type safety for common operations +//! like position conversion between byte offsets and LSP UTF-16 positions. 
+ +use std::path::PathBuf; + +use derive_more::{AsRef, Deref, Display, From, Into}; +use url::Url; + +use crate::error::{validate_identifier, LspError, LspResult}; + +/// Byte offset within a document (rowan uses byte offsets). +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Into, Default)] +pub struct ByteOffset(pub u32); + +impl ByteOffset { + #[must_use] + pub fn new(offset: u32) -> Self { + Self(offset) + } +} + +impl From for ByteOffset { + fn from(ts: rowan::TextSize) -> Self { + Self(ts.into()) + } +} + +impl From for rowan::TextSize { + fn from(offset: ByteOffset) -> Self { + rowan::TextSize::from(offset.0) + } +} + +impl From for ByteOffset { + fn from(offset: usize) -> Self { + Self(u32::try_from(offset).unwrap_or(u32::MAX)) + } +} + +impl From for usize { + fn from(offset: ByteOffset) -> Self { + offset.0 as usize + } +} + +/// UTF-16 offset (LSP uses UTF-16 code units). +#[derive(Debug, Clone, Copy, PartialEq, Eq, From, Into, Default)] +pub struct Utf16Offset(pub u32); + +/// Line number (0-indexed). +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Into, Default)] +pub struct Line(pub u32); + +/// Character offset within a line (0-indexed, UTF-16 code units). +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Into, Default)] +pub struct CharOffset(pub u32); + +/// LSP position (0-indexed line, UTF-16 character). 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub struct LspPosition { + pub line: Line, + pub character: CharOffset, +} + +impl From<(u32, u32)> for LspPosition { + fn from((line, character): (u32, u32)) -> Self { + Self { + line: Line(line), + character: CharOffset(character), + } + } +} + +impl From for LspPosition { + fn from(p: lsp_types::Position) -> Self { + Self { + line: Line(p.line), + character: CharOffset(p.character), + } + } +} + +impl From for lsp_types::Position { + fn from(p: LspPosition) -> Self { + Self { + line: p.line.0, + character: p.character.0, + } + } +} + +/// LSP range (start and end positions). +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub struct LspRange { + pub start: LspPosition, + pub end: LspPosition, +} + +impl From for LspRange { + fn from(r: lsp_types::Range) -> Self { + Self { + start: r.start.into(), + end: r.end.into(), + } + } +} + +impl From for lsp_types::Range { + fn from(r: LspRange) -> Self { + Self { + start: r.start.into(), + end: r.end.into(), + } + } +} + +/// Normalized canonical path as cache key. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CanonicalPath(PathBuf); + +impl std::fmt::Display for CanonicalPath { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.0.to_string_lossy()) + } +} + +impl CanonicalPath { + /// Create a new canonical path from an already-canonicalized path. + #[must_use] + pub fn new(path: PathBuf) -> Self { + Self(path) + } + + /// Try to create a canonical path, canonicalizing if needed. + /// + /// # Errors + /// Returns any I/O error from [`std::path::Path::canonicalize`]. + pub fn try_from_path(path: &std::path::Path) -> std::io::Result { + Ok(Self(path.canonicalize()?)) + } + + /// Create from a URI. 
+ /// + /// # Errors + /// Returns `Err(LspError::InvalidUri)` when URI parsing fails, + /// `Err(LspError::NonFileUri)` when URI scheme is not `file`, + /// or `Err(LspError::UriToPath)` when URI cannot be converted to a path. + pub fn from_uri(uri: &lsp_types::Uri) -> LspResult { + let uri_str = uri.as_str(); + let parsed = Url::parse(uri_str).map_err(|_| LspError::InvalidUri(uri_str.to_string()))?; + if parsed.scheme() != "file" { + return Err(LspError::NonFileUri(uri_str.to_string())); + } + let path = parsed + .to_file_path() + .map_err(|()| LspError::UriToPath(uri_str.to_string()))?; + Ok(Self(path.canonicalize().unwrap_or(path))) + } + + /// Convert to a file URI. + /// + /// # Errors + /// Returns `Err(LspError::PathToUri)` when the path cannot be converted + /// into a file URI, or `Err(LspError::InvalidUri)` if URI parsing fails. + pub fn to_uri(&self) -> LspResult { + let url = + Url::from_file_path(&self.0).map_err(|()| LspError::PathToUri(self.to_string()))?; + url.as_str() + .parse() + .map_err(|_| LspError::InvalidUri(url.to_string())) + } + + /// Get the inner path. + #[must_use] + pub fn as_path(&self) -> &std::path::Path { + &self.0 + } +} + +/// Document version (monotonically increasing). +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, From, Into, Default)] +pub struct DocVersion(pub i32); + +impl DocVersion { + #[must_use] + pub fn new(version: i32) -> Self { + Self(version) + } +} + +/// A validated Jsonnet identifier (symbol name). +/// +/// This type ensures that any identifier used in rename operations or +/// symbol lookups is a valid Jsonnet identifier at the type level. +#[derive(Debug, Clone, PartialEq, Eq, Hash, Deref, AsRef, Display)] +#[display("{}", _0)] +pub struct SymbolName(String); + +impl SymbolName { + /// Create a new symbol name, validating that it's a valid Jsonnet identifier. + /// + /// Returns an error if the name is empty, starts with a digit, + /// contains invalid characters, or is a reserved keyword. 
+ /// + /// # Errors + /// Returns `Err(LspError::InvalidIdentifier)` when the name is not a valid + /// Jsonnet identifier. + pub fn new(s: &str) -> LspResult { + validate_identifier(s)?; + Ok(Self(s.to_string())) + } + + /// Create a symbol name from an already-validated string. + /// + /// This is useful when extracting identifiers from parsed AST tokens, + /// which are guaranteed to be syntactically valid identifiers. + /// + /// # Safety + /// The caller must ensure the string is a valid Jsonnet identifier. + #[must_use] + pub fn from_token(s: &str) -> Self { + Self(s.to_string()) + } +} + +#[cfg(test)] +mod tests { + use std::time::{SystemTime, UNIX_EPOCH}; + + use assert_matches::assert_matches; + + use super::*; + use crate::error::InvalidIdentifierReason; + + #[test] + fn test_byte_offset_conversions() { + let offset = ByteOffset::new(42); + assert_eq!(offset.0, 42); + + let ts: rowan::TextSize = offset.into(); + assert_eq!(u32::from(ts), 42); + + let offset2: ByteOffset = ts.into(); + assert_eq!(offset2, offset); + } + + #[test] + fn test_lsp_position_conversion() { + let lsp_pos = lsp_types::Position { + line: 10, + character: 5, + }; + let pos: LspPosition = lsp_pos.into(); + assert_eq!(pos.line, Line(10)); + assert_eq!(pos.character, CharOffset(5)); + + let back: lsp_types::Position = pos.into(); + assert_eq!(back, lsp_pos); + } + + #[test] + fn test_line_char_offset_ordering() { + assert!(Line(0) < Line(1)); + assert!(CharOffset(0) < CharOffset(10)); + } + + #[test] + fn test_symbol_name_valid() { + let name = SymbolName::new("foo").expect("foo should be a valid symbol name"); + assert_eq!(&*name, "foo"); + assert_eq!(name.as_ref(), "foo"); + + // from_token for already-validated identifiers + let name2 = SymbolName::from_token("bar"); + assert_eq!(&*name2, "bar"); + } + + #[test] + fn test_symbol_name_invalid() { + // Empty + let err = SymbolName::new("").expect_err("empty name should fail"); + assert_matches!( + err, + 
LspError::InvalidIdentifier(InvalidIdentifierReason::Empty) + ); + + // Starts with digit + let err = SymbolName::new("123foo").expect_err("leading digit should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::InvalidStart('1')) + ); + + // Invalid characters + let err = SymbolName::new("foo-bar").expect_err("dash should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::InvalidCharacter('-')) + ); + let err = SymbolName::new("foo.bar").expect_err("dot should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::InvalidCharacter('.')) + ); + + // Keywords + let err = SymbolName::new("local").expect_err("keyword should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::ReservedKeyword(keyword)) + if keyword == "local" + ); + let err = SymbolName::new("function").expect_err("keyword should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::ReservedKeyword(keyword)) + if keyword == "function" + ); + let err = SymbolName::new("if").expect_err("keyword should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::ReservedKeyword(keyword)) + if keyword == "if" + ); + } + + #[test] + fn test_symbol_name_display() { + let name = SymbolName::new("myVar").expect("myVar should be a valid symbol name"); + assert_eq!(format!("{name}"), "myVar"); + } + + fn unique_test_dir(label: &str) -> PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("clock should be after unix epoch") + .as_nanos(); + std::env::temp_dir().join(format!( + "jrsonnet-lsp-document-{label}-{}-{nanos}", + std::process::id() + )) + } + + #[test] + fn test_canonical_path_from_uri_rejects_non_file_uri() { + let uri: lsp_types::Uri = "https://example.com/test.jsonnet" + .parse() + .expect("URI should parse"); + let err = 
CanonicalPath::from_uri(&uri).expect_err("non-file URI should fail"); + assert_matches!(err, LspError::NonFileUri(value) if value == uri.as_str()); + } + + #[test] + fn test_canonical_path_uri_round_trip_with_escaped_chars() { + let dir = unique_test_dir("uri-roundtrip"); + std::fs::create_dir_all(&dir).expect("create test directory"); + let file_path = dir.join("a #b.jsonnet"); + std::fs::write(&file_path, "{}").expect("create test file"); + let canonical = file_path.canonicalize().expect("canonicalize test file"); + + let url = Url::from_file_path(&canonical).expect("build file URL"); + let uri: lsp_types::Uri = url.as_str().parse().expect("parse URI"); + let path = CanonicalPath::from_uri(&uri).expect("decode URI to path"); + assert_eq!(path.as_path(), canonical.as_path()); + + let roundtrip_uri = path.to_uri().expect("encode path to URI"); + let roundtrip_path = CanonicalPath::from_uri(&roundtrip_uri).expect("decode roundtrip URI"); + assert_eq!(roundtrip_path, path); + + std::fs::remove_dir_all(dir).expect("remove test directory"); + } + + #[test] + fn test_canonical_path_to_uri_rejects_relative_path() { + let path = CanonicalPath::new(PathBuf::from("relative.jsonnet")); + let err = path + .to_uri() + .expect_err("relative path should fail URI conversion"); + assert_matches!(err, LspError::PathToUri(value) if value == "relative.jsonnet"); + } +} diff --git a/crates/jrsonnet-lsp-handlers/Cargo.toml b/crates/jrsonnet-lsp-handlers/Cargo.toml new file mode 100644 index 00000000..a48902c3 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "jrsonnet-lsp-handlers" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "LSP request handlers for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-lsp-import = { version = "0.5.0-pre97", path = 
"../jrsonnet-lsp-import" } +jrsonnet-lsp-inference = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-inference" } +jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } +jrsonnet-lsp-stdlib = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-stdlib" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-fmt = { version = "0.5.0-pre97", path = "../../cmds/jrsonnet-fmt" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +lsp-types.workspace = true +rayon = "1.11.0" +serde = { workspace = true, features = ["derive"] } +rowan.workspace = true +serde_json.workspace = true +strum = { version = "0.26", features = ["derive"] } +thiserror.workspace = true +tracing = "0.1.44" + +[lints] +workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" +indoc.workspace = true +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +rstest = "0.23" +tempfile.workspace = true diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/mod.rs b/crates/jrsonnet-lsp-handlers/src/code_action/mod.rs new file mode 100644 index 00000000..babbe30d --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/mod.rs @@ -0,0 +1,112 @@ +//! Code action handler. +//! +//! Provides quick fixes for diagnostics. 
+ +mod quickfix; +mod remove_unused; + +use jrsonnet_lsp_document::Document; +use lsp_types::{ + CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, NumberOrString, Range, Uri, +}; +use remove_unused::{ + remove_all_unused_bindings_action_with_policy, remove_unused_binding_action_with_policy, + RemoveUnusedPolicy, +}; +use serde::{Deserialize, Serialize}; + +use self::quickfix::unused_variable_action; + +const UNUSED_VARIABLE_CODE: &str = "unused-variable"; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum RemoveUnusedMode { + #[default] + All, + ImportBindings, + NonImportBindings, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum RemoveUnusedCommentsMode { + #[default] + None, + Above, + Below, + All, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(default, rename_all = "camelCase")] +pub struct CodeActionConfig { + pub remove_unused: RemoveUnusedMode, + pub remove_unused_comments: RemoveUnusedCommentsMode, +} + +fn is_unused_variable_diagnostic(diagnostic: &Diagnostic) -> bool { + matches!( + diagnostic.code.as_ref(), + Some(NumberOrString::String(code)) if code == UNUSED_VARIABLE_CODE + ) +} + +fn range_overlaps(a: Range, b: Range) -> bool { + (a.start.line, a.start.character) <= (b.end.line, b.end.character) + && (b.start.line, b.start.character) <= (a.end.line, a.end.character) +} + +fn wants_quickfix(context: &CodeActionContext) -> bool { + context.only.as_ref().is_none_or(|kinds| { + kinds + .iter() + .any(|kind| kind.as_str().starts_with(CodeActionKind::QUICKFIX.as_str())) + }) +} + +fn wants_fix_all(context: &CodeActionContext) -> bool { + context.only.as_ref().is_none_or(|kinds| { + kinds.iter().any(|kind| { + kind.as_str() + .starts_with(CodeActionKind::SOURCE_FIX_ALL.as_str()) + }) + }) +} + +/// Build code actions for a given range and 
context. +pub fn code_actions( + document: &Document, + uri: &Uri, + range: Range, + context: &CodeActionContext, + config: &CodeActionConfig, +) -> Vec { + let mut actions = Vec::new(); + let policy = RemoveUnusedPolicy::from_config(*config); + + if wants_quickfix(context) { + actions.extend( + context + .diagnostics + .iter() + .filter(|diagnostic| range_overlaps(diagnostic.range, range)) + .flat_map(|diagnostic| { + [ + unused_variable_action(document, uri, diagnostic), + remove_unused_binding_action_with_policy(document, uri, diagnostic, policy), + ] + .into_iter() + .flatten() + }), + ); + } + + if let Some(fix_all_action) = + remove_all_unused_bindings_action_with_policy(document, uri, context, policy) + { + actions.push(fix_all_action); + } + + actions +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/quickfix.rs b/crates/jrsonnet-lsp-handlers/src/code_action/quickfix.rs new file mode 100644 index 00000000..9838955c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/quickfix.rs @@ -0,0 +1,154 @@ +use std::collections::HashMap; + +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{ + CodeAction, CodeActionKind, CodeActionOrCommand, Diagnostic, TextEdit, Uri, WorkspaceEdit, +}; + +use super::is_unused_variable_diagnostic; + +pub(super) fn unused_variable_action( + document: &Document, + uri: &Uri, + diagnostic: &Diagnostic, +) -> Option { + if !is_unused_variable_diagnostic(diagnostic) { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text(); + if name.starts_with('_') { + return None; + } + + let mut changes = HashMap::new(); + changes.insert( + uri.clone(), + 
vec![TextEdit { + range: to_lsp_range(token.text_range(), line_index, text), + new_text: format!("_{name}"), + }], + ); + + Some( + CodeAction { + title: format!("Prefix `{name}` with `_`"), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + } + .into(), + ) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::{ + CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, DiagnosticSeverity, + NumberOrString, Position, Range, Uri, + }; + + use super::super::{code_actions, CodeActionConfig, UNUSED_VARIABLE_CODE}; + + fn diag_unused(range: Range) -> Diagnostic { + Diagnostic { + range, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String(UNUSED_VARIABLE_CODE.to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + } + } + + fn uri() -> Uri { + "file:///test/code_action.jsonnet".parse().unwrap() + } + + fn range(start: u32, end: u32) -> Range { + Range { + start: Position { + line: 0, + character: start, + }, + end: Position { + line: 0, + character: end, + }, + } + } + + #[test] + fn test_unused_variable_quickfix() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + trigger_kind: None, + }; + + let actions = code_actions( + &document, + &uri(), + range(0, 20), + &context, + &CodeActionConfig::default(), + ); + let first = actions.first().expect("should include at least one action"); + let CodeActionOrCommand::CodeAction(action) = first else { + panic!("expected a code action") + }; + + 
assert_eq!(action.title, "Prefix `x` with `_`"); + assert_eq!(action.kind, Some(CodeActionKind::QUICKFIX)); + let changes = action + .edit + .as_ref() + .and_then(|edit| edit.changes.as_ref()) + .expect("quickfix should include edits"); + let edits = changes.get(&uri()).expect("edits should target test uri"); + assert_eq!(edits[0].new_text, "_x"); + } + + #[test] + fn test_code_action_skips_non_identifier_range() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(8, 9))], // '=' character + only: None, + trigger_kind: None, + }; + + let actions = code_actions( + &document, + &uri(), + range(0, 20), + &context, + &CodeActionConfig::default(), + ); + assert!(actions.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/edits.rs b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/edits.rs new file mode 100644 index 00000000..8f6a917b --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/edits.rs @@ -0,0 +1,605 @@ +use jrsonnet_lsp_document::{token_at_offset, Document}; +use jrsonnet_rowan_parser::{ + nodes::{BindDestruct, BindFunction, Destruct, Expr, ExprBase, MemberBindStmt, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, +}; +use lsp_types::Diagnostic; +use rowan::TextRange; + +use super::{ + ranges::{expand_range_with_policy, remove_range_for_list_entry}, + RemoveUnusedPolicy, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) struct RemoveUnusedEdit { + pub(super) binding_name: String, + pub(super) range: TextRange, +} +fn is_import_expression(expr: Expr) -> bool { + match expr.expr_base() { + Some(ExprBase::ExprImport(_)) => true, + Some(ExprBase::ExprParened(parened)) => parened.expr().is_some_and(is_import_expression), + _ => false, + } +} + +fn remove_edit_for_bind( + document: &Document, + policy: RemoveUnusedPolicy, + binding_name: String, + bind_node: SyntaxNode, + 
value_expr: Option, +) -> Option<(RemoveUnusedEdit, bool)> { + if binding_name.starts_with('_') { + return None; + } + + let removal_range = if let Some(stmt_local) = bind_node.ancestors().find_map(StmtLocal::cast) { + let bind_count = stmt_local.binds().count(); + if bind_count == 1 { + stmt_local.syntax().text_range() + } else { + remove_range_for_list_entry(&bind_node)? + } + } else if let Some(member_bind_stmt) = bind_node.ancestors().find_map(MemberBindStmt::cast) { + remove_range_for_list_entry(member_bind_stmt.syntax())? + } else { + return None; + }; + + let expanded_range = + expand_range_with_policy(document.ast().syntax(), removal_range, policy.comments)?; + let import_binding = value_expr.is_some_and(is_import_expression); + Some(( + RemoveUnusedEdit { + binding_name, + range: expanded_range, + }, + import_binding, + )) +} + +pub(super) fn remove_unused_edit_for_diagnostic( + document: &Document, + diagnostic: &Diagnostic, + policy: RemoveUnusedPolicy, +) -> Option<(RemoveUnusedEdit, bool)> { + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { + return None; + } + + if let Some(bind_destruct) = token.parent()?.ancestors().find_map(BindDestruct::cast) { + let destruct = BindDestruct::into(&bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let name = full.name()?.ident_lit()?.text().to_string(); + return remove_edit_for_bind( + document, + policy, + name, + bind_destruct.syntax().clone(), + bind_destruct.value(), + ); + } + + let bind_function = token.parent()?.ancestors().find_map(BindFunction::cast)?; + let name = bind_function.name()?.ident_lit()?.text().to_string(); + remove_edit_for_bind( + document, + policy, + name, + bind_function.syntax().clone(), + bind_function.value(), + ) +} +#[cfg(test)] 
+mod tests { + use std::collections::HashMap; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::{ + CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, + DiagnosticSeverity, NumberOrString, Position, Range, TextEdit, Uri, WorkspaceEdit, + }; + + use super::super::super::{ + code_actions, CodeActionConfig, RemoveUnusedCommentsMode, RemoveUnusedMode, + UNUSED_VARIABLE_CODE, + }; + + fn diag_unused(range: Range) -> Diagnostic { + Diagnostic { + range, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String(UNUSED_VARIABLE_CODE.to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + } + } + + fn uri() -> Uri { + "file:///test/code_action.jsonnet".parse().unwrap() + } + + fn range(start: u32, end: u32) -> Range { + Range { + start: Position { + line: 0, + character: start, + }, + end: Position { + line: 0, + character: end, + }, + } + } + + fn span(start_line: u32, start_char: u32, end_line: u32, end_char: u32) -> Range { + Range { + start: Position { + line: start_line, + character: start_char, + }, + end: Position { + line: end_line, + character: end_char, + }, + } + } + + #[test] + fn test_code_action_returns_fix_all_for_source_fix_all_filter() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: Some(vec![CodeActionKind::SOURCE_FIX_ALL]), + trigger_kind: None, + }; + + let actions = code_actions( + &document, + &uri(), + range(0, 20), + &context, + &CodeActionConfig::default(), + ); + assert_eq!( + actions, + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + 
changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 11), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + })] + ); + } + + #[test] + fn test_remove_unused_binding_handles_multi_bind_statement() { + let document = Document::new("local x = 1, y = 2; y".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + trigger_kind: None, + }; + + assert_eq!( + code_actions( + &document, + &uri(), + range(0, 22), + &context, + &CodeActionConfig::default(), + ), + vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(5, 13), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(5, 13), + new_text: String::new(), + 
}], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] + ); + } + + #[test] + fn test_remove_unused_binding_handles_object_local() { + let document = Document::new("{ local x = 1, a: x }".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(8, 9))], + only: None, + trigger_kind: None, + }; + + assert_eq!( + code_actions( + &document, + &uri(), + range(0, 21), + &context, + &CodeActionConfig::default(), + ), + vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(8, 9))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(8, 9), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(8, 9))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(1, 15), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(8, 9))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(1, 15), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: 
Some(false), + disabled: None, + data: None, + }), + ] + ); + } + + #[test] + fn test_fix_all_removes_entire_local_when_all_bindings_unused() { + let document = Document::new("local x = 1, y = 2; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7)), diag_unused(range(13, 14))], + only: Some(vec![CodeActionKind::SOURCE_FIX_ALL]), + trigger_kind: None, + }; + + assert_eq!( + code_actions( + &document, + &uri(), + range(0, 22), + &context, + &CodeActionConfig::default(), + ), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7)), diag_unused(range(13, 14))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 18), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + })] + ); + } + + #[test] + fn test_non_import_policy_skips_remove_actions_for_import_bindings() { + let document = Document::new( + "local x = import \"foo.libsonnet\"; 42".to_string(), + DocVersion::new(1), + ); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + trigger_kind: None, + }; + let config = CodeActionConfig { + remove_unused: RemoveUnusedMode::NonImportBindings, + ..CodeActionConfig::default() + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 35), &context, &config), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + 
change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + })] + ); + } + + #[test] + fn test_non_import_policy_keeps_remove_actions_for_non_import_bindings() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + trigger_kind: None, + }; + let config = CodeActionConfig { + remove_unused: RemoveUnusedMode::NonImportBindings, + ..CodeActionConfig::default() + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 20), &context, &config), + vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 11), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 11), + new_text: String::new(), + }], + )])), + document_changes: None, + 
change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] + ); + } + + #[test] + fn test_import_only_policy_skips_remove_actions_for_non_import_bindings() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + trigger_kind: None, + }; + let config = CodeActionConfig { + remove_unused: RemoveUnusedMode::ImportBindings, + ..CodeActionConfig::default() + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 20), &context, &config), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + })] + ); + } + + #[test] + fn test_fix_all_can_remove_above_comments_when_configured() { + let document = Document::new( + "// heading\nlocal x = 1;\n42".to_string(), + DocVersion::new(1), + ); + let context = CodeActionContext { + diagnostics: vec![diag_unused(span(1, 6, 1, 7))], + only: Some(vec![CodeActionKind::SOURCE_FIX_ALL]), + trigger_kind: None, + }; + let config = CodeActionConfig { + remove_unused_comments: RemoveUnusedCommentsMode::Above, + ..CodeActionConfig::default() + }; + + assert_eq!( + code_actions(&document, &uri(), span(0, 0, 2, 2), &context, &config), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(span(1, 6, 1, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: 
span(0, 0, 1, 11), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + })] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/mod.rs b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/mod.rs new file mode 100644 index 00000000..7e4a7119 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/mod.rs @@ -0,0 +1,181 @@ +mod edits; +mod ranges; + +use std::collections::HashMap; + +use jrsonnet_lsp_document::{to_lsp_range, Document}; +use lsp_types::{ + CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, TextEdit, Uri, + WorkspaceEdit, +}; + +use self::{edits::remove_unused_edit_for_diagnostic, ranges::removal_ranges_for_fix_all}; +use super::{ + is_unused_variable_diagnostic, wants_fix_all, CodeActionConfig, RemoveUnusedCommentsMode, + RemoveUnusedMode, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum RemovalFlavor { + All, + ImportBindings, + NonImportBindings, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum CommentPolicy { + None, + Above, + Below, + All, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) struct RemoveUnusedPolicy { + flavor: RemovalFlavor, + comments: CommentPolicy, +} + +impl RemovalFlavor { + const fn allows(self, import_binding: bool) -> bool { + match self { + Self::All => true, + Self::ImportBindings => import_binding, + Self::NonImportBindings => !import_binding, + } + } +} + +impl CommentPolicy { + const fn keeps_above_comments(self) -> bool { + matches!(self, Self::Above | Self::All) + } + + const fn keeps_below_comments(self) -> bool { + matches!(self, Self::Below | Self::All) + } +} + +impl RemoveUnusedPolicy { + pub(super) const fn from_config(config: CodeActionConfig) -> Self { + let flavor = match config.remove_unused { + RemoveUnusedMode::All => RemovalFlavor::All, + 
RemoveUnusedMode::ImportBindings => RemovalFlavor::ImportBindings, + RemoveUnusedMode::NonImportBindings => RemovalFlavor::NonImportBindings, + }; + let comments = match config.remove_unused_comments { + RemoveUnusedCommentsMode::None => CommentPolicy::None, + RemoveUnusedCommentsMode::Above => CommentPolicy::Above, + RemoveUnusedCommentsMode::Below => CommentPolicy::Below, + RemoveUnusedCommentsMode::All => CommentPolicy::All, + }; + Self { flavor, comments } + } +} +pub(super) fn remove_unused_binding_action_with_policy( + document: &Document, + uri: &Uri, + diagnostic: &Diagnostic, + policy: RemoveUnusedPolicy, +) -> Option { + if !is_unused_variable_diagnostic(diagnostic) { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let (edit, import_binding) = remove_unused_edit_for_diagnostic(document, diagnostic, policy)?; + if !policy.flavor.allows(import_binding) { + return None; + } + + let mut changes = HashMap::new(); + changes.insert( + uri.clone(), + vec![TextEdit { + range: to_lsp_range(edit.range, line_index, text), + new_text: String::new(), + }], + ); + + Some( + CodeAction { + title: format!("Remove unused binding `{}`", edit.binding_name), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + } + .into(), + ) +} + +pub(super) fn remove_all_unused_bindings_action_with_policy( + document: &Document, + uri: &Uri, + context: &CodeActionContext, + policy: RemoveUnusedPolicy, +) -> Option { + if !wants_fix_all(context) { + return None; + } + + let diagnostics: Vec = context + .diagnostics + .iter() + .filter(|diagnostic| is_unused_variable_diagnostic(diagnostic)) + .filter(|diagnostic| { + remove_unused_edit_for_diagnostic(document, diagnostic, policy) + .is_some_and(|(_, import_binding)| 
policy.flavor.allows(import_binding)) + }) + .cloned() + .collect(); + if diagnostics.is_empty() { + return None; + } + + let mut ranges = removal_ranges_for_fix_all(document, &diagnostics, policy); + if ranges.is_empty() { + return None; + } + ranges.sort_unstable_by_key(|range| (range.start(), range.end())); + let text = document.text(); + let line_index = document.line_index(); + + let edits: Vec = ranges + .into_iter() + .rev() + .map(|range| TextEdit { + range: to_lsp_range(range, line_index, text), + new_text: String::new(), + }) + .collect(); + + let mut changes = HashMap::new(); + changes.insert(uri.clone(), edits); + + Some( + CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(diagnostics), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + } + .into(), + ) +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/ranges.rs b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/ranges.rs new file mode 100644 index 00000000..91ca871f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/ranges.rs @@ -0,0 +1,348 @@ +use std::collections::HashSet; + +use jrsonnet_lsp_document::{token_at_offset, Document}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, BindDestruct, Destruct, Member, ObjBodyMemberList, StmtLocal}, + rowan::{TextRange, TextSize, TokenAtOffset}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use lsp_types::Diagnostic; + +use super::{is_unused_variable_diagnostic, CommentPolicy, RemoveUnusedPolicy}; + +const fn is_trivia_kind(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::WHITESPACE + | SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) +} + +const fn is_comment_kind(kind: SyntaxKind) -> bool { 
+ matches!( + kind, + SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) +} + +const fn is_whitespace_kind(kind: SyntaxKind) -> bool { + matches!(kind, SyntaxKind::WHITESPACE) +} + +fn can_absorb_trivia(token: &SyntaxToken, keep_comments: bool) -> bool { + if is_whitespace_kind(token.kind()) { + return !token.text().contains("\n\n"); + } + + is_comment_kind(token.kind()) && keep_comments +} + +fn previous_significant_token(token: &SyntaxToken) -> Option { + std::iter::successors(token.prev_token(), SyntaxToken::prev_token) + .find(|candidate| !is_trivia_kind(candidate.kind())) +} + +fn next_significant_token(token: &SyntaxToken) -> Option { + std::iter::successors(token.next_token(), SyntaxToken::next_token) + .find(|candidate| !is_trivia_kind(candidate.kind())) +} + +fn single_line_trivia_end_after(token: &SyntaxToken) -> Option { + let trivia = token.next_token()?; + if !is_trivia_kind(trivia.kind()) || trivia.text().contains('\n') { + return None; + } + Some(trivia.text_range().end()) +} + +fn token_at_range_start(syntax: &SyntaxNode, range: TextRange) -> Option { + match syntax.token_at_offset(range.start()) { + TokenAtOffset::None => None, + TokenAtOffset::Single(token) => Some(token), + TokenAtOffset::Between(_, right) => Some(right), + } +} + +fn token_at_range_end(syntax: &SyntaxNode, range: TextRange) -> Option { + let end = range.end().checked_sub(TextSize::new(1))?; + match syntax.token_at_offset(end) { + TokenAtOffset::None => None, + TokenAtOffset::Single(token) => Some(token), + TokenAtOffset::Between(left, _) => Some(left), + } +} + +pub(super) fn expand_range_with_policy( + syntax: &SyntaxNode, + range: TextRange, + comments: CommentPolicy, +) -> Option { + let mut start = token_at_range_start(syntax, range)?; + while let Some(previous) = start.prev_token() { + if can_absorb_trivia(&previous, comments.keeps_above_comments()) { + start = previous; + continue; + } + break; + } + + let 
mut end = token_at_range_end(syntax, range)?; + while let Some(next) = end.next_token() { + if can_absorb_trivia(&next, comments.keeps_below_comments()) { + end = next; + continue; + } + break; + } + + Some(TextRange::new( + start.text_range().start(), + end.text_range().end(), + )) +} + +pub(super) fn remove_range_for_list_entry(entry: &SyntaxNode) -> Option { + let first = entry.first_token()?; + let last = entry.last_token()?; + + if let Some(next) = next_significant_token(&last) { + if next.kind() == SyntaxKind::COMMA { + let end = + single_line_trivia_end_after(&next).unwrap_or_else(|| next.text_range().end()); + return Some(TextRange::new(first.text_range().start(), end)); + } + } + + if let Some(previous) = previous_significant_token(&first) { + if previous.kind() == SyntaxKind::COMMA { + return Some(TextRange::new( + previous.text_range().start(), + last.text_range().end(), + )); + } + } + + Some(entry.text_range()) +} + +fn remove_range_for_entry_run( + entries: &[SyntaxNode], + run_start: usize, + run_end: usize, +) -> Option { + let first = entries.get(run_start)?.first_token()?; + let last = entries.get(run_end)?.last_token()?; + + let start = if run_start == 0 { + first.text_range().start() + } else { + let previous = previous_significant_token(&first)?; + if previous.kind() == SyntaxKind::COMMA { + previous.text_range().start() + } else { + first.text_range().start() + } + }; + + let end = if run_start == 0 && run_end + 1 < entries.len() { + match next_significant_token(&last) { + Some(next) if next.kind() == SyntaxKind::COMMA => { + single_line_trivia_end_after(&next).unwrap_or_else(|| next.text_range().end()) + } + _ => last.text_range().end(), + } + } else { + last.text_range().end() + }; + + Some(TextRange::new(start, end)) +} + +fn contiguous_runs(indices: &[usize]) -> Vec<(usize, usize)> { + if indices.is_empty() { + return Vec::new(); + } + + let mut runs = Vec::new(); + let mut run_start = indices[0]; + let mut previous = indices[0]; + for 
&index in indices.iter().skip(1) { + if index == previous + 1 { + previous = index; + continue; + } + runs.push((run_start, previous)); + run_start = index; + previous = index; + } + runs.push((run_start, previous)); + runs +} + +fn bind_name_range(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind_destruct) => { + let destruct = BindDestruct::into(bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) + } + Bind::BindFunction(bind_function) => Some(bind_function.name()?.syntax().text_range()), + } +} + +fn binding_name_range_for_diagnostic( + document: &Document, + diagnostic: &Diagnostic, +) -> Option { + if !is_unused_variable_diagnostic(diagnostic) { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { + return None; + } + Some(token.text_range()) +} + +fn unused_binding_name_ranges( + document: &Document, + diagnostics: &[Diagnostic], +) -> HashSet { + diagnostics + .iter() + .filter_map(|diagnostic| binding_name_range_for_diagnostic(document, diagnostic)) + .collect() +} + +fn removal_ranges_for_stmt_local( + stmt_local: &StmtLocal, + unused_name_ranges: &HashSet, +) -> Vec { + let binds: Vec = stmt_local.binds().collect(); + let unused_indices: Vec = binds + .iter() + .enumerate() + .filter_map(|(idx, bind)| { + let name_range = bind_name_range(bind)?; + unused_name_ranges.contains(&name_range).then_some(idx) + }) + .collect(); + if unused_indices.is_empty() { + return Vec::new(); + } + if unused_indices.len() == binds.len() { + return vec![stmt_local.syntax().text_range()]; + } + + let entries: Vec = binds.iter().map(|bind| bind.syntax().clone()).collect(); + contiguous_runs(&unused_indices) + .into_iter() + .filter_map(|(run_start, 
run_end)| remove_range_for_entry_run(&entries, run_start, run_end)) + .collect() +} + +fn removal_ranges_for_member_list( + member_list: &ObjBodyMemberList, + unused_name_ranges: &HashSet, +) -> Vec { + let members: Vec = member_list.members().collect(); + let unused_indices: Vec = members + .iter() + .enumerate() + .filter_map(|(idx, member)| { + let Member::MemberBindStmt(bind_stmt) = member else { + return None; + }; + let bind = bind_stmt.obj_local()?.bind()?; + let name_range = bind_name_range(&bind)?; + unused_name_ranges.contains(&name_range).then_some(idx) + }) + .collect(); + if unused_indices.is_empty() { + return Vec::new(); + } + + let entries: Vec = members + .iter() + .map(|member| member.syntax().clone()) + .collect(); + contiguous_runs(&unused_indices) + .into_iter() + .filter_map(|(run_start, run_end)| remove_range_for_entry_run(&entries, run_start, run_end)) + .collect() +} + +fn merge_overlapping_ranges(mut ranges: Vec) -> Vec { + if ranges.is_empty() { + return ranges; + } + ranges.sort_unstable_by_key(|range| (range.start(), range.end())); + + let mut merged = Vec::with_capacity(ranges.len()); + for range in ranges { + let Some(last) = merged.last_mut() else { + merged.push(range); + continue; + }; + if range.start() <= last.end() { + let end = if range.end() > last.end() { + range.end() + } else { + last.end() + }; + *last = TextRange::new(last.start(), end); + continue; + } + merged.push(range); + } + + merged +} + +pub(super) fn removal_ranges_for_fix_all( + document: &Document, + diagnostics: &[Diagnostic], + policy: RemoveUnusedPolicy, +) -> Vec { + let unused_name_ranges = unused_binding_name_ranges(document, diagnostics); + if unused_name_ranges.is_empty() { + return Vec::new(); + } + + let mut ranges = Vec::new(); + let ast = document.ast(); + for stmt_local in ast.syntax().descendants().filter_map(StmtLocal::cast) { + ranges.extend(removal_ranges_for_stmt_local( + &stmt_local, + &unused_name_ranges, + )); + } + for member_list in ast + 
.syntax() + .descendants() + .filter_map(ObjBodyMemberList::cast) + { + ranges.extend(removal_ranges_for_member_list( + &member_list, + &unused_name_ranges, + )); + } + + let syntax = document.ast().syntax().clone(); + let expanded: Vec = ranges + .into_iter() + .filter_map(|range| expand_range_with_policy(&syntax, range, policy.comments)) + .collect(); + merge_overlapping_ranges(expanded) +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs new file mode 100644 index 00000000..607f94be --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs @@ -0,0 +1,213 @@ +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_inference::TypeAnalysis; +use lsp_types::{CodeLens, Uri}; + +use super::type_lenses::type_lenses; + +/// Configuration for code lens generation. +#[derive(Debug, Clone, Default)] +pub struct CodeLensConfig { + /// Show inferred types for function definitions. + pub show_types: bool, +} + +impl CodeLensConfig { + /// Create a config that shows all supported code lenses. + #[must_use] + pub fn all() -> Self { + Self { show_types: true } + } +} + +/// Generate code lenses for a document. +/// +/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure +/// that import types are properly resolved. Pass `None` to skip type lenses. +pub fn code_lens( + document: &Document, + _uri: &Uri, + config: &CodeLensConfig, + analysis: Option<&TypeAnalysis>, +) -> Vec { + if config.show_types { + if let Some(analysis) = analysis { + return type_lenses(document, analysis); + } + } + + Vec::new() +} + +/// Resolve a code lens (add command if not present). +/// +/// This is called when the client requests resolution of a code lens +/// that was returned without a command. +#[must_use] +pub fn resolve_code_lens(lens: CodeLens) -> CodeLens { + // Our code lenses always include commands, so no resolution needed. 
+ lens +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use jrsonnet_lsp_document::{to_lsp_range, DocVersion, Document}; + use jrsonnet_lsp_inference::TypeAnalysis; + use jrsonnet_lsp_types::GlobalTyStore; + use jrsonnet_rowan_parser::{ + nodes::{Bind, BindFunction, StmtLocal}, + AstNode, SyntaxKind, + }; + use lsp_types::{CodeLens, Command, Uri}; + + use super::*; + + fn make_uri(name: &str) -> Uri { + format!("file:///test/{name}.jsonnet").parse().unwrap() + } + + fn test_analysis(doc: &Document) -> TypeAnalysis { + let global_types = Arc::new(GlobalTyStore::new()); + TypeAnalysis::analyze_with_global(doc, global_types) + } + + fn expected_function_type_lens( + doc: &Document, + analysis: &TypeAnalysis, + name: &str, + ) -> CodeLens { + let ast = doc.ast(); + let text = doc.text(); + let line_index = doc.line_index(); + let bind_func = ast + .syntax() + .descendants() + .filter_map(BindFunction::cast) + .find(|bind_func| bind_func.name().is_some_and(|n| n.syntax().text() == name)) + .expect("function binding should exist"); + let name_node = bind_func.name().expect("function should have name"); + let body = bind_func.value().expect("function should have body"); + let ty = analysis + .type_for_range(body.syntax().text_range()) + .expect("function body should have inferred type"); + let type_str = analysis.display(ty); + assert_ne!(type_str, "any", "function type lens should be informative"); + assert_ne!( + type_str, "function", + "function type lens should not be generic" + ); + CodeLens { + range: to_lsp_range(name_node.syntax().text_range(), line_index, text), + command: Some(Command { + title: format!(":: {type_str}"), + command: String::new(), + arguments: None, + }), + data: None, + } + } + + fn expected_complex_binding_type_lens(doc: &Document, analysis: &TypeAnalysis) -> CodeLens { + let ast = doc.ast(); + let text = doc.text(); + let line_index = doc.line_index(); + for node in ast.syntax().descendants() { + let Some(stmt_local) = 
StmtLocal::cast(node) else { + continue; + }; + for bind in stmt_local.binds() { + let Bind::BindDestruct(bd) = bind else { + continue; + }; + let Some(value) = bd.value() else { + continue; + }; + let is_complex = value.expr_base().is_some_and(|base| { + matches!( + base.syntax().kind(), + SyntaxKind::EXPR_OBJECT + | SyntaxKind::EXPR_ARRAY + | SyntaxKind::EXPR_CALL + | SyntaxKind::EXPR_IF_THEN_ELSE + ) + }); + if !is_complex { + continue; + } + let ty = analysis + .type_for_range(value.syntax().text_range()) + .expect("complex binding should have inferred type"); + let type_str = analysis.display(ty); + assert_ne!( + type_str, "any", + "complex binding type should be informative" + ); + return CodeLens { + range: to_lsp_range(bd.syntax().text_range(), line_index, text), + command: Some(Command { + title: format!(":: {type_str}"), + command: String::new(), + arguments: None, + }), + data: None, + }; + } + } + panic!("complex binding should produce a type lens"); + } + + #[test] + fn type_lens_for_function() { + let code = "local add(a, b) = a + b; add(1, 2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri = make_uri("test"); + let analysis = test_analysis(&doc); + + let config = CodeLensConfig { show_types: true }; + + let lenses = code_lens(&doc, &uri, &config, Some(&analysis)); + let expected = vec![expected_function_type_lens(&doc, &analysis, "add")]; + assert_eq!(lenses, expected); + } + + #[test] + fn type_lens_for_complex_binding() { + let code = "local config = { name: 'test', count: 42 }; config"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri = make_uri("test"); + let analysis = test_analysis(&doc); + + let config = CodeLensConfig { show_types: true }; + + let lenses = code_lens(&doc, &uri, &config, Some(&analysis)); + let expected = vec![expected_complex_binding_type_lens(&doc, &analysis)]; + assert_eq!(lenses, expected); + } + + #[test] + fn all_lenses_config_returns_types() { + let code = "local 
f(x) = x * 2; f(21)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri = make_uri("test"); + let analysis = test_analysis(&doc); + + let config = CodeLensConfig::all(); + + let lenses = code_lens(&doc, &uri, &config, Some(&analysis)); + let expected = vec![expected_function_type_lens(&doc, &analysis, "f")]; + assert_eq!(lenses, expected); + } + + #[test] + fn no_lenses_without_analysis() { + let code = "local add(a, b) = a + b; add(1, 2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri = make_uri("test"); + + let config = CodeLensConfig::all(); + let lenses = code_lens(&doc, &uri, &config, None); + + assert!(lenses.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs new file mode 100644 index 00000000..ecadf37b --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs @@ -0,0 +1,9 @@ +//! Code lens handler for type annotations. +//! +//! Provides: +//! - Type annotations for function definitions + +mod dispatch; +mod type_lenses; + +pub use dispatch::{code_lens, resolve_code_lens, CodeLensConfig}; diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/type_lenses.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/type_lenses.rs new file mode 100644 index 00000000..a9921f92 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/type_lenses.rs @@ -0,0 +1,107 @@ +use jrsonnet_lsp_document::{to_lsp_range, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_rowan_parser::{ + nodes::{Bind, BindFunction, StmtLocal}, + AstNode, SyntaxKind, +}; +use lsp_types::{CodeLens, Command}; + +/// Generate type annotation code lenses for function definitions. 
+pub(super) fn type_lenses(document: &Document, analysis: &TypeAnalysis) -> Vec { + let mut lenses = Vec::new(); + let ast = document.ast(); + let text = document.text(); + let line_index = document.line_index(); + + // Find all function definitions (local f(x) = ...) + for node in ast.syntax().descendants() { + // Look for BindFunction nodes (function definitions) + if let Some(bind_func) = BindFunction::cast(node.clone()) { + let Some(name_node) = bind_func.name() else { + continue; + }; + + // Get the type for the function body (the expression) + let Some(body) = bind_func.value() else { + continue; + }; + let body_range = body.syntax().text_range(); + let Some(ty) = analysis.type_for_range(body_range) else { + continue; + }; + + // Format the type + let type_str = analysis.display(ty); + + // Skip if it's just "any" or "function" - not informative + if type_str == "any" || type_str == "function" { + continue; + } + + let range = to_lsp_range(name_node.syntax().text_range(), line_index, text); + + lenses.push(CodeLens { + range, + command: Some(Command { + title: format!(":: {type_str}"), + command: String::new(), // No action, just informational + arguments: None, + }), + data: None, + }); + } + + // Also show types for complex local bindings (local x = { ... 
}) + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + if let Bind::BindDestruct(bd) = bind { + // Skip simple literals - only show for complex expressions + let Some(value) = bd.value() else { + continue; + }; + + // Check if it's a "complex" expression (object, array, function call) + let is_complex = value.expr_base().is_some_and(|base| { + matches!( + base.syntax().kind(), + SyntaxKind::EXPR_OBJECT + | SyntaxKind::EXPR_ARRAY | SyntaxKind::EXPR_CALL + | SyntaxKind::EXPR_IF_THEN_ELSE + ) + }); + + if !is_complex { + continue; + } + + // Get type for the binding value + let value_range = value.syntax().text_range(); + let Some(ty) = analysis.type_for_range(value_range) else { + continue; + }; + + let type_str = analysis.display(ty); + + // Skip uninformative types + if type_str == "any" { + continue; + } + + let range = to_lsp_range(bd.syntax().text_range(), line_index, text); + + lenses.push(CodeLens { + range, + command: Some(Command { + title: format!(":: {type_str}"), + command: String::new(), + arguments: None, + }), + data: None, + }); + } + } + } + } + + lenses +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields/items.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields/items.rs new file mode 100644 index 00000000..afe4657e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields/items.rs @@ -0,0 +1,68 @@ +use jrsonnet_lsp_document::{is_valid_jsonnet_identifier, ByteOffset, LineIndex}; +use jrsonnet_rowan_parser::{ + nodes::{FieldName, MemberFieldNormal}, + AstToken, +}; +use lsp_types::{CompletionItem, CompletionItemKind, CompletionTextEdit, Range, TextEdit}; + +pub(super) fn field_completion_item( + name: String, + detail: String, + dot_pos: usize, + offset: u32, + line_index: &LineIndex, + text: &str, +) -> Option { + if is_valid_jsonnet_identifier(&name) { + return Some(CompletionItem { + label: name, + kind: Some(CompletionItemKind::FIELD), + detail: Some(detail), + 
..Default::default() + }); + } + + let start_offset = ByteOffset::new(u32::try_from(dot_pos).ok()?); + let end_offset = ByteOffset::new(offset); + let start = line_index.position(start_offset, text)?; + let end = line_index.position(end_offset, text)?; + let escaped = serde_json::to_string(&name).ok()?; + + Some(CompletionItem { + label: name, + kind: Some(CompletionItemKind::FIELD), + detail: Some(detail), + text_edit: Some(CompletionTextEdit::Edit(TextEdit { + range: Range { + start: start.into(), + end: end.into(), + }, + new_text: format!("[{escaped}]"), + })), + ..Default::default() + }) +} + +pub(super) fn extract_field_name(field: &MemberFieldNormal) -> Option { + let field_name = field.field_name()?; + extract_field_name_from_field_name(&field_name) +} + +pub(super) fn extract_field_name_from_field_name(field_name: &FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + if let Some(name) = fixed.id() { + if let Some(ident) = name.ident_lit() { + return Some(ident.text().to_string()); + } + } + if let Some(text) = fixed.text() { + let s = text.syntax().text().to_string(); + let s = s.trim_matches('"').trim_matches('\''); + return Some(s.to_string()); + } + None + } + FieldName::FieldNameDynamic(_) => None, + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields/lookup.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields/lookup.rs new file mode 100644 index 00000000..4372b6fe --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields/lookup.rs @@ -0,0 +1,191 @@ +use jrsonnet_rowan_parser::{ + nodes::{BindDestruct, Destruct, ObjBody, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, +}; + +use super::items::{extract_field_name, extract_field_name_from_field_name}; + +pub(super) fn find_object_fields_for_identifier( + root: &SyntaxNode, + identifier: &str, + offset: u32, +) -> Option> { + let text_size = rowan::TextSize::from(offset); + + for node in root.descendants() { + if node.kind() == 
SyntaxKind::STMT_LOCAL { + if node.text_range().end() > text_size { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + match &bind { + jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bd) => { + if let Some(fields) = check_bind_destruct_for_object(bd, identifier) { + return Some(fields); + } + } + jrsonnet_rowan_parser::nodes::Bind::BindFunction(bf) => { + let _ = bf; + } + } + } + } + } + } + + None +} + +pub(super) fn find_bracket_lookup_target_expr_range( + root: &SyntaxNode, + identifier: &str, + key: &str, + offset: u32, +) -> Option { + let text_size = rowan::TextSize::from(offset); + + for node in root.descendants() { + if node.kind() != SyntaxKind::STMT_LOCAL { + continue; + } + if node.text_range().end() > text_size { + continue; + } + + let Some(stmt_local) = StmtLocal::cast(node.clone()) else { + continue; + }; + for bind in stmt_local.binds() { + let jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bind_destruct) = bind else { + continue; + }; + if let Some(range) = + check_bind_destruct_for_bracket_target(&bind_destruct, identifier, key) + { + return Some(range); + } + } + } + + None +} + +fn check_bind_destruct_for_object(bind: &BindDestruct, identifier: &str) -> Option> { + let destruct = bind.into()?; + + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + + if ident.text() != identifier { + return None; + } + + let value_expr = bind.value()?; + extract_object_fields(value_expr.syntax()) + } else { + None + } +} + +fn check_bind_destruct_for_bracket_target( + bind: &BindDestruct, + identifier: &str, + key: &str, +) -> Option { + let destruct = bind.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != identifier { + return None; + } + + let value_expr = bind.value()?; + 
find_object_field_expr_range(value_expr.syntax(), key) +} + +fn find_object_field_expr_range(expr: &SyntaxNode, key: &str) -> Option { + let obj_node = find_object_in_expr(expr)?; + let obj_body = ObjBody::cast(obj_node)?; + let ObjBody::ObjBodyMemberList(member_list) = obj_body else { + return None; + }; + + for member in member_list.members() { + let jrsonnet_rowan_parser::nodes::Member::MemberFieldNormal(field) = member else { + continue; + }; + let Some(field_name) = extract_field_name(&field) else { + continue; + }; + if field_name != key { + continue; + } + return Some(field.expr()?.syntax().text_range()); + } + + None +} + +pub(super) fn extract_object_fields(expr: &SyntaxNode) -> Option> { + let obj_node = find_object_in_expr(expr)?; + let obj_body = ObjBody::cast(obj_node)?; + let mut fields = Vec::new(); + + if let ObjBody::ObjBodyMemberList(member_list) = obj_body { + for member in member_list.members() { + match member { + jrsonnet_rowan_parser::nodes::Member::MemberFieldNormal(field) => { + if let Some(name) = extract_field_name(&field) { + fields.push(name); + } + } + jrsonnet_rowan_parser::nodes::Member::MemberFieldMethod(method) => { + if let Some(field_name) = method.field_name() { + if let Some(name) = extract_field_name_from_field_name(&field_name) { + fields.push(name); + } + } + } + _ => {} + } + } + } + + if fields.is_empty() { + None + } else { + Some(fields) + } +} + +fn find_object_in_expr(node: &SyntaxNode) -> Option { + if node.kind() == SyntaxKind::OBJ_BODY_MEMBER_LIST || node.kind() == SyntaxKind::OBJ_BODY_COMP { + return Some(node.clone()); + } + + for child in node.children() { + if child.kind() == SyntaxKind::EXPR_OBJECT { + for obj_child in child.children() { + if obj_child.kind() == SyntaxKind::OBJ_BODY_MEMBER_LIST + || obj_child.kind() == SyntaxKind::OBJ_BODY_COMP + { + return Some(obj_child); + } + } + } + + if child.kind() == SyntaxKind::EXPR { + if let Some(found) = find_object_in_expr(&child) { + return Some(found); + } + } + 
} + + None +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields/mod.rs new file mode 100644 index 00000000..4bea06f8 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields/mod.rs @@ -0,0 +1,203 @@ +//! Object field completions for `obj.` patterns. + +mod items; +mod lookup; +mod parse; + +use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::CompletionItem; + +use self::{ + items::field_completion_item, + lookup::{ + extract_object_fields, find_bracket_lookup_target_expr_range, + find_object_fields_for_identifier, + }, + parse::{bracket_index_anchor, expression_range_before_dot, parse_bracket_lookup}, +}; + +pub fn check_object_field_completion( + document: &Document, + text: &str, + offset: u32, + analysis: &TypeAnalysis, +) -> Option> { + let offset_usize = offset as usize; + let before_cursor = &text[..offset_usize]; + let dot_pos = before_cursor.rfind('.')?; + + let after_dot = &before_cursor[dot_pos + 1..]; + if after_dot.contains(char::is_whitespace) && !after_dot.trim().is_empty() { + return None; + } + + let before_dot = &before_cursor[..dot_pos]; + let ident_start = before_dot + .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') + .map_or(0, |i| i + 1); + let identifier = before_dot[ident_start..].trim(); + let ast = document.ast(); + let line_index = document.line_index(); + + if identifier == "std" { + let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; + let before_dot_offset = ByteOffset::new(u32::try_from(before_dot_pos).ok()?); + let token = token_at_offset(ast.syntax(), before_dot_offset)?; + if token.kind() == SyntaxKind::IDENT && ident_resolves_to_builtin_std(&token) { + return None; + } + } + + let prefix = after_dot.trim(); + let before_dot_pos = if dot_pos > 0 { 
dot_pos - 1 } else { 0 }; + let before_dot_offset = u32::try_from(before_dot_pos).ok()?; + let dot_offset = u32::try_from(dot_pos).ok()?; + + if let Some(expr_range) = + expression_range_before_dot(ast.syntax(), before_dot_offset, dot_offset) + { + if let Some(fields) = analysis.fields_for_range(expr_range) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + } + + if let Some(index_anchor) = bracket_index_anchor(before_cursor, dot_pos) { + let index_anchor_text_size = rowan::TextSize::from(index_anchor); + if let Some(fields) = analysis.fields_at_position(ast.syntax(), index_anchor_text_size) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + } + + let before_dot_text_size = rowan::TextSize::from(before_dot_offset); + if let Some(fields) = analysis.fields_at_position(ast.syntax(), before_dot_text_size) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item(name, store.display(ty), dot_pos, offset, line_index, text) + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + + if let Some((base_identifier, key)) = parse_bracket_lookup(before_dot) { + if let Some(target_range) = + find_bracket_lookup_target_expr_range(ast.syntax(), &base_identifier, &key, dot_offset) + { + if let Some(fields) = 
analysis.fields_for_range(target_range) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + if !items.is_empty() { + return Some(items); + } + } + + if let Some(target_node) = ast + .syntax() + .descendants() + .find(|node| node.text_range() == target_range) + { + if let Some(fields) = extract_object_fields(&target_node) { + let items = fields + .into_iter() + .filter(|name| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|name| { + field_completion_item( + name, + "object field".to_string(), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + if !items.is_empty() { + return Some(items); + } + } + } + } + } + + if identifier.is_empty() { + return None; + } + + let fields = find_object_fields_for_identifier(ast.syntax(), identifier, dot_offset)?; + let items = fields + .into_iter() + .filter(|f| prefix.is_empty() || f.starts_with(prefix)) + .filter_map(|name| { + field_completion_item( + name, + "object field".to_string(), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + + if items.is_empty() { + return None; + } + Some(items) +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields/parse.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields/parse.rs new file mode 100644 index 00000000..cea94c1c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields/parse.rs @@ -0,0 +1,71 @@ +use jrsonnet_lsp_document::{token_at_offset, ByteOffset}; +use jrsonnet_rowan_parser::{nodes::Expr, AstNode, SyntaxNode}; + +pub(super) fn expression_range_before_dot( + root: &SyntaxNode, + before_dot_offset: u32, + dot_offset: u32, +) -> Option { + let token = token_at_offset(root, ByteOffset::new(before_dot_offset))?; + let dot = rowan::TextSize::from(dot_offset); + token + 
.parent_ancestors() + .filter_map(Expr::cast) + .map(|expr| expr.syntax().text_range()) + .filter(|range| range.end() == dot) + .min_by_key(|range| range.len()) +} + +pub(super) fn bracket_index_anchor(before_cursor: &str, dot_pos: usize) -> Option { + let before_dot = before_cursor.get(..dot_pos)?; + let mut bracket_depth = 0usize; + + for (index, byte) in before_dot.as_bytes().iter().enumerate().rev() { + match *byte { + b']' => bracket_depth += 1, + b'[' => { + if bracket_depth == 0 { + continue; + } + bracket_depth -= 1; + if bracket_depth == 0 { + return u32::try_from(index).ok(); + } + } + _ => {} + } + } + + None +} + +pub(super) fn parse_bracket_lookup(before_dot: &str) -> Option<(String, String)> { + let trimmed = before_dot.trim_end(); + let close_bracket = trimmed.rfind(']')?; + if close_bracket + 1 != trimmed.len() { + return None; + } + + let open_bracket = trimmed[..close_bracket].rfind('[')?; + let base_expr = trimmed[..open_bracket].trim_end(); + let key_expr = trimmed[open_bracket + 1..close_bracket].trim(); + let key = key_expr + .strip_prefix('"') + .and_then(|value| value.strip_suffix('"')) + .or_else(|| { + key_expr + .strip_prefix('\'') + .and_then(|value| value.strip_suffix('\'')) + })? 
+ .to_string(); + + let ident_start = base_expr + .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') + .map_or(0, |index| index + 1); + let identifier = base_expr[ident_start..].trim(); + if identifier.is_empty() { + return None; + } + + Some((identifier.to_string(), key)) +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs new file mode 100644 index 00000000..b98e1fd7 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs @@ -0,0 +1,51 @@ +use std::path::{Path, PathBuf}; + +use jrsonnet_lsp_document::{token_at_offset, Document, LspPosition}; +use jrsonnet_lsp_inference::{SemanticArtifacts, TypeAnalysis}; +use jrsonnet_rowan_parser::AstNode; +use lsp_types::CompletionList; + +mod imports; +mod object_fields; +mod scope; +mod stdlib; +#[cfg(test)] +mod test_util; + +pub(super) fn completion_dispatch( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + import_roots: &[PathBuf], + analysis: &TypeAnalysis, + semantic: Option<&SemanticArtifacts>, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(position, text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset); + let text_offset: rowan::TextSize = offset.into(); + + if let Some(result) = stdlib::try_stdlib_completion(token.as_ref(), text, text_offset) { + return Some(result); + } + + if let Some(result) = imports::try_import_completion(text, text_offset, doc_path, import_roots) + { + return Some(result); + } + + if let Some(result) = + object_fields::try_object_field_completion(document, text, text_offset, analysis) + { + return Some(result); + } + + Some(scope::general_completion( + document, + position, + text_offset, + semantic, + )) +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/imports.rs 
b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/imports.rs new file mode 100644 index 00000000..71099cfa --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/imports.rs @@ -0,0 +1,69 @@ +use std::path::{Path, PathBuf}; + +use lsp_types::CompletionList; +use tracing::debug; + +use super::super::super::imports::check_import_completion; + +pub(super) fn try_import_completion( + text: &str, + offset: rowan::TextSize, + doc_path: Option<&Path>, + import_roots: &[PathBuf], +) -> Option { + let items = check_import_completion(text, offset.into(), doc_path, import_roots)?; + debug!(count = items.len(), "providing import completions"); + Some(CompletionList { + is_incomplete: false, + items, + }) +} + +#[cfg(test)] +mod tests { + use super::super::{ + super::completion, + test_util::{test_analysis, test_document}, + }; + use crate::completion::imports::find_import_string_start; + + #[test] + fn test_import_string_detection() { + let text1 = r#"import ""#; + assert_eq!(find_import_string_start(text1), Some(8)); + + let text2 = r#"importstr ""#; + assert_eq!(find_import_string_start(text2), Some(11)); + + let text3 = r#"local x = ""#; + assert_eq!(find_import_string_start(text3), None); + + let text4 = r#"import "foo.jsonnet""#; + assert_eq!(find_import_string_start(text4), None); + } + + #[test] + fn test_import_completion_with_path() { + use tempfile::TempDir; + + let temp_dir = TempDir::new().unwrap(); + let temp_path = temp_dir.path(); + + std::fs::write(temp_path.join("utils.libsonnet"), "{}").unwrap(); + std::fs::write(temp_path.join("config.jsonnet"), "{}").unwrap(); + std::fs::create_dir(temp_path.join("lib")).unwrap(); + std::fs::write(temp_path.join("lib/helper.libsonnet"), "{}").unwrap(); + + let doc_path = temp_path.join("main.jsonnet"); + let code = r#"import ""#; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 8).into(); + + let list = completion(&doc, pos, Some(&doc_path), 
&analysis) + .expect("should get import completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["config.jsonnet", "lib", "utils.libsonnet"]); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/object_fields.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/object_fields.rs new file mode 100644 index 00000000..0123cd3b --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/object_fields.rs @@ -0,0 +1,354 @@ +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_inference::TypeAnalysis; +use lsp_types::CompletionList; +use tracing::debug; + +use super::super::super::fields::check_object_field_completion; + +pub(super) fn try_object_field_completion( + document: &Document, + text: &str, + offset: rowan::TextSize, + analysis: &TypeAnalysis, +) -> Option { + let items = check_object_field_completion(document, text, offset.into(), analysis)?; + debug!(count = items.len(), "providing object field completions"); + Some(CompletionList { + is_incomplete: false, + items, + }) +} + +#[cfg(test)] +mod tests { + use lsp_types::{CompletionItem, CompletionItemKind}; + + use super::super::{ + super::completion, + test_util::{test_analysis, test_document}, + }; + + #[test] + fn test_shadowed_std_uses_object_field_completion() { + let code = "local std = { foo: 1 }; std."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "foo".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + 
insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_completion_item_has_documentation() { + let code = "local obj = { foo: 1 }; obj."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 28).into(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "foo".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_object_field_completion() { + let code = "local obj = { foo: 1, bar: 2 }; obj."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 36).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get object field completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["bar", "foo"]); + } + + #[test] + fn test_object_field_completion_with_prefix() { + let code = "local obj = { foo: 1, bar: 2 }; obj.f"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 37).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get object field completions with prefix"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["foo"]); + } + + #[test] + fn test_object_field_completion_non_identifier_uses_bracket_text_edit() 
{ + let code = r#"local obj = { "my-field": 1, normal: 2 }; obj."#; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + let dot = u32::try_from(code.rfind('.').expect("dot should exist")).unwrap(); + let end = u32::try_from(code.len()).unwrap(); + + let mut result = completion(&doc, pos, None, &analysis).expect("should get completions"); + result + .items + .sort_by(|left, right| left.label.cmp(&right.label)); + + assert_eq!( + result.items, + vec![ + CompletionItem { + label: "my-field".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: Some(lsp_types::CompletionTextEdit::Edit(lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position::new(0, dot), + end: lsp_types::Position::new(0, end), + }, + new_text: r#"["my-field"]"#.to_string(), + })), + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }, + CompletionItem { + label: "normal".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }, + ] + ); + } + + #[test] + fn test_object_field_completion_non_identifier_prefix_rewrites_dot_expression() { + let code = r#"local obj = { "my-field": 1, normal: 2 }; obj.my"#; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + 
u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + let dot = u32::try_from(code.rfind('.').expect("dot should exist")).unwrap(); + let end = u32::try_from(code.len()).unwrap(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "my-field".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: Some(lsp_types::CompletionTextEdit::Edit(lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position::new(0, dot), + end: lsp_types::Position::new(0, end), + }, + new_text: r#"["my-field"]"#.to_string(), + })), + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_object_field_completion_with_types() { + let code = "local obj = { num: 42, str: \"hello\", arr: [1, 2] }; obj."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 56).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get object field completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "arr".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("[number, number]".to_string()), + ..Default::default() + }, + CompletionItem { + label: "num".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "str".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("string".to_string()), + ..Default::default() + }, + ] + ); + } + + 
#[test] + fn test_object_field_completion_nested() { + let code = "local obj = { inner: { x: 1, y: 2 } }; obj.inner."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 49).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get nested object field completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "x".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "y".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_object_field_completion_after_bracket_lookup() { + let code = r#"local hm = { foo: { a: true, b: 4, c: "hi" } }; hm["foo"]."#; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get object field completions after bracket lookup"); + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "a".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("true".to_string()), + ..Default::default() + }, + CompletionItem { + label: "b".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "c".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("string".to_string()), + ..Default::default() + }, + ] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/scope.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/scope.rs new file mode 100644 
index 00000000..da84454c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/scope.rs @@ -0,0 +1,269 @@ +use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_lsp_inference::SemanticArtifacts; +use jrsonnet_rowan_parser::AstNode; +use lsp_types::CompletionList; + +use super::super::{ + super::{ + helpers::{get_identifier_prefix, is_inside_object}, + locals::get_local_completions_with_semantic, + }, + keywords::{add_object_keyword_completions, add_std_completion}, +}; + +pub(super) fn general_completion( + document: &Document, + position: LspPosition, + offset: rowan::TextSize, + semantic: Option<&SemanticArtifacts>, +) -> CompletionList { + let text = document.text(); + let ast = document.ast(); + let mut items = + get_local_completions_with_semantic(document, position, text, offset.into(), semantic); + + add_std_completion(&mut items); + + if is_inside_object(ast.syntax(), offset.into()) { + let prefix = get_identifier_prefix(text, offset.into()); + add_object_keyword_completions(&mut items, prefix); + } + + CompletionList { + is_incomplete: false, + items, + } +} + +#[cfg(test)] +mod tests { + use lsp_types::{CompletionItem, CompletionItemKind}; + + use super::super::{ + super::completion, + test_util::{test_analysis, test_document}, + }; + + #[test] + fn test_completion_includes_std_and_local() { + let code = "local x = 1; "; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 13).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_local_variable_completion() { + let code = "local foo = 1; local bar = 2; "; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 30).into(); + + let list = completion(&doc, pos, None, 
&analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["bar", "foo", "std"]); + } + + #[test] + fn test_function_parameter_completion() { + let code = "local f(x, y) = x + "; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 20).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["f", "std", "x", "y"]); + } + + #[test] + fn test_completion_with_prefix() { + let code = "local foo = 1; local bar = 2; f"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 31).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["foo", "std"]); + } + + #[test] + fn test_for_comprehension_variable_completion() { + let code = "[x for x in [1, 2, 3] if ]"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 25).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in for comprehension"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_object_local_completion() { + let code = "{ local helper = 1, field: }"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 27).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get completions in object"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, 
vec!["$", "helper", "self", "std", "super"]); + } + + #[test] + fn test_nested_function_completion() { + let code = "local outer(a) = local inner(b) = a + ; inner(1); outer(1)"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 37).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in nested function"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["a", "b", "std"]); + } + + #[test] + fn test_completion_at_eof() { + let code = "local x = 1;\n"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (1, 0).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions at EOF"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_completion_in_object() { + let code = "{ foo: 1, bar: }"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 14).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get completions in object"); + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "$".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to root object".to_string()), + ..Default::default() + }, + CompletionItem { + label: "self".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to current object".to_string()), + ..Default::default() + }, + CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }, + CompletionItem { + label: "super".to_string(), + kind: 
Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to inherited object".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_no_self_super_outside_object() { + let code = "local x = 1; x"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 13).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get completions"); + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }, + CompletionItem { + label: "x".to_string(), + kind: Some(CompletionItemKind::VARIABLE), + detail: Some("local variable".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_multiple_for_specs_completion() { + let code = "[x + y for x in [1] for y in [2]]"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 3).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in multi-for comprehension"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x", "y"]); + } + + #[test] + fn test_completion_with_syntax_error() { + let code = "local x = 1;\nlocal y = 2;\n{ foo: x +"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (2, 10).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions despite syntax error"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["$", "self", "std", "super", "x", "y"]); + } + + #[test] + fn test_shadowed_variable_completion() { + let code = "local x = 1;\nlocal f(x) = x +"; + let doc = 
test_document(code); + let analysis = test_analysis(&doc); + let pos = (1, 16).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get completions with shadowing"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["f", "std", "x"]); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/stdlib.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/stdlib.rs new file mode 100644 index 00000000..657fbe36 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/stdlib.rs @@ -0,0 +1,68 @@ +use jrsonnet_rowan_parser::SyntaxToken; +use lsp_types::CompletionList; +use tracing::debug; + +use super::super::super::stdlib::check_stdlib_completion; + +pub(super) fn try_stdlib_completion( + token: Option<&SyntaxToken>, + text: &str, + offset: rowan::TextSize, +) -> Option { + let token = token?; + let items = check_stdlib_completion(token, text, offset.into())?; + debug!(count = items.len(), "providing stdlib completions"); + Some(CompletionList { + is_incomplete: false, + items, + }) +} + +#[cfg(test)] +mod tests { + use super::super::{ + super::completion, + test_util::{test_analysis, test_document}, + }; + + #[test] + fn test_stdlib_completion_with_prefix_xor() { + let code = "std.xo"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 6).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["xor"]); + } + + #[test] + fn test_stdlib_completion_with_prefix_x() { + let code = "std.x"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 5).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| 
i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["xnor", "xor"]); + } + + #[test] + fn test_stdlib_completion_with_alias_prefix_xo() { + let code = "local s = std; s.xo"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["xor"]); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/test_util.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/test_util.rs new file mode 100644 index 00000000..48cfd3ef --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/test_util.rs @@ -0,0 +1,14 @@ +use std::sync::Arc; + +use jrsonnet_lsp_document::{DocVersion, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_types::GlobalTyStore; + +pub(super) fn test_analysis(doc: &Document) -> TypeAnalysis { + let global_types = Arc::new(GlobalTyStore::new()); + TypeAnalysis::analyze_with_global(doc, global_types) +} + +pub(super) fn test_document(code: &str) -> Document { + Document::new(code.to_string(), DocVersion::new(1)) +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/keywords.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/keywords.rs new file mode 100644 index 00000000..c837f67f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/keywords.rs @@ -0,0 +1,40 @@ +use lsp_types::{CompletionItem, CompletionItemKind}; + +pub(super) fn add_std_completion(items: &mut Vec) { + // `std` is always available in scope. 
+ items.push(CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }); +} + +pub(super) fn add_object_keyword_completions(items: &mut Vec, prefix: &str) { + if prefix.is_empty() || "$".starts_with(prefix) { + items.push(CompletionItem { + label: "$".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to root object".to_string()), + ..Default::default() + }); + } + + if prefix.is_empty() || "self".starts_with(prefix) { + items.push(CompletionItem { + label: "self".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to current object".to_string()), + ..Default::default() + }); + } + + if prefix.is_empty() || "super".starts_with(prefix) { + items.push(CompletionItem { + label: "super".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to inherited object".to_string()), + ..Default::default() + }); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/mod.rs new file mode 100644 index 00000000..b931825e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/mod.rs @@ -0,0 +1,70 @@ +//! Completion handler for providing code completions. +//! +//! Supports: +//! - Standard library functions (triggered by `std.`) +//! - Local variables in scope +//! - Object field completion (triggered by `obj.`) +//! - Import path completion (inside import strings) + +mod dispatch; +mod keywords; + +use std::path::{Path, PathBuf}; + +use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_lsp_inference::{SemanticArtifacts, TypeAnalysis}; +use lsp_types::CompletionList; + +/// Get completion items for the given position. +/// +/// `doc_path` is the path to the current document, used for import path completion. 
+/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure +/// that import types are properly resolved. +pub fn completion( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + analysis: &TypeAnalysis, +) -> Option { + completion_with_import_roots_and_semantic(document, position, doc_path, &[], analysis, None) +} + +/// Get completion items with explicit import search roots. +/// +/// `import_roots` is typically configured from server `jpath` entries and used +/// when completing inside import strings. +pub fn completion_with_import_roots( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + import_roots: &[PathBuf], + analysis: &TypeAnalysis, +) -> Option { + completion_with_import_roots_and_semantic( + document, + position, + doc_path, + import_roots, + analysis, + None, + ) +} + +/// Get completion items with explicit import roots and semantic artifacts. +pub fn completion_with_import_roots_and_semantic( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + import_roots: &[PathBuf], + analysis: &TypeAnalysis, + semantic: Option<&SemanticArtifacts>, +) -> Option { + dispatch::completion_dispatch( + document, + position, + doc_path, + import_roots, + analysis, + semantic, + ) +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs b/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs new file mode 100644 index 00000000..c4594828 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs @@ -0,0 +1,52 @@ +//! Shared helper functions for completion. + +use jrsonnet_lsp_document::ByteOffset; +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxNode}; + +/// Get the identifier prefix at the current position. 
+pub fn get_identifier_prefix(text: &str, offset: usize) -> &str { + if offset == 0 { + return ""; + } + + let before = &text[..offset]; + let start = before + .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') + .map_or(0, |i| i + 1); + + &before[start..] +} + +/// Check if the given offset is inside an object body. +/// +/// Walks up the AST from the token at the offset looking for object-related nodes. +pub fn is_inside_object(root: &SyntaxNode, offset: ByteOffset) -> bool { + use jrsonnet_lsp_document::find_node_at_offset; + + // Find the node at the offset + let Some(node) = find_node_at_offset(root, offset) else { + return false; + }; + + // Walk up the tree looking for object body nodes + let mut current = node; + loop { + match current.kind() { + // Object body nodes indicate we're inside an object + SyntaxKind::OBJ_BODY_MEMBER_LIST + | SyntaxKind::OBJ_BODY_COMP + // Object expression node + | SyntaxKind::EXPR_OBJECT => { + return true; + } + _ => {} + } + + let Some(parent) = current.parent() else { + break; + }; + current = parent; + } + + false +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/imports.rs b/crates/jrsonnet-lsp-handlers/src/completion/imports.rs new file mode 100644 index 00000000..b86544ef --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/imports.rs @@ -0,0 +1,230 @@ +//! Import path completions for `import` statements. + +use std::{ + collections::HashSet, + path::{Path, PathBuf}, +}; + +use lsp_types::{CompletionItem, CompletionItemKind}; + +/// Check if we're inside an import string and provide path completions. +pub fn check_import_completion( + text: &str, + offset: u32, + doc_path: Option<&Path>, + import_roots: &[PathBuf], +) -> Option> { + let offset_usize = offset as usize; + + // Find if we're inside a string that's part of an import + // Look for patterns like: import "..., import '..., importstr "..., importstr '... 
+ let before_cursor = &text[..offset_usize]; + + // Find the start of the current string + let string_start = find_import_string_start(before_cursor)?; + + // Extract the partial path typed so far + let partial_path = &before_cursor[string_start..]; + + let (dir_part, prefix) = split_partial_path(partial_path); + let roots = import_search_roots(doc_path, import_roots); + if roots.is_empty() { + return None; + } + + let mut items = Vec::new(); + let mut seen = HashSet::new(); + + for root in roots { + let search_dir = if dir_part.is_empty() { + root + } else { + root.join(dir_part) + }; + + let Ok(entries) = std::fs::read_dir(&search_dir) else { + continue; + }; + + for entry in entries.filter_map(std::result::Result::ok) { + let name = entry.file_name().to_string_lossy().to_string(); + + // Filter by prefix + if !prefix.is_empty() && !name.starts_with(prefix) { + continue; + } + + let path = entry.path(); + let is_dir = path.is_dir(); + + // For files, only show .jsonnet and .libsonnet files + if !is_dir { + let ext = path.extension().and_then(|e| e.to_str()).unwrap_or(""); + if ext != "jsonnet" && ext != "libsonnet" && ext != "json" { + continue; + } + } + + let (kind, insert_text) = if is_dir { + (CompletionItemKind::FOLDER, format!("{name}/")) + } else { + (CompletionItemKind::FILE, name.clone()) + }; + + let key = (is_dir, insert_text.clone()); + if !seen.insert(key) { + continue; + } + + items.push(CompletionItem { + label: name, + kind: Some(kind), + insert_text: Some(insert_text), + ..Default::default() + }); + } + } + + if items.is_empty() { + None + } else { + // Sort: directories first, then files + items.sort_by(|a, b| { + let a_is_dir = a.kind == Some(CompletionItemKind::FOLDER); + let b_is_dir = b.kind == Some(CompletionItemKind::FOLDER); + match (a_is_dir, b_is_dir) { + (true, false) => std::cmp::Ordering::Less, + (false, true) => std::cmp::Ordering::Greater, + _ => a.label.cmp(&b.label), + } + }); + Some(items) + } +} + +fn 
split_partial_path(partial_path: &str) -> (&str, &str) { + let Some(last_slash) = partial_path.rfind('/') else { + return ("", partial_path); + }; + let dir_part = &partial_path[..last_slash]; + let prefix = &partial_path[last_slash + 1..]; + (dir_part, prefix) +} + +fn import_search_roots(doc_path: Option<&Path>, import_roots: &[PathBuf]) -> Vec { + let mut roots = Vec::new(); + if let Some(doc_dir) = doc_path.and_then(Path::parent) { + roots.push(doc_dir.to_path_buf()); + } + roots.extend(import_roots.iter().cloned()); + + // Keep the first occurrence for stable precedence (document dir before jpath). + let mut deduped = Vec::new(); + let mut seen = HashSet::new(); + for root in roots { + if seen.insert(root.clone()) { + deduped.push(root); + } + } + deduped +} + +/// Find the start of an import string, returning the position after the opening quote. +#[must_use] +pub fn find_import_string_start(text: &str) -> Option { + // Look backwards for import keyword followed by a string + // Patterns: import ", import ', importstr ", importstr ', importbin ", importbin ' + + // Find the last opening quote + let last_double = text.rfind('"'); + let last_single = text.rfind('\''); + + let (quote_pos, _quote_char) = match (last_double, last_single) { + (Some(d), Some(s)) => { + if d > s { + (d, '"') + } else { + (s, '\'') + } + } + (Some(d), None) => (d, '"'), + (None, Some(s)) => (s, '\''), + (None, None) => return None, + }; + + // Check that there's no closing quote after this opening quote + // (i.e., we're inside the string, not after it) + let after_quote = &text[quote_pos + 1..]; + if after_quote.contains('"') || after_quote.contains('\'') { + return None; + } + + // Check that before the quote we have an import keyword + let before_quote = text[..quote_pos].trim_end(); + + if before_quote.ends_with("import") + || before_quote.ends_with("importstr") + || before_quote.ends_with("importbin") + { + return Some(quote_pos + 1); + } + + None +} + +#[cfg(test)] +mod tests { 
+ use std::fs; + + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_import_completion_searches_jpath_roots() { + let tmp = TempDir::new().expect("temp dir should be created"); + let workspace = tmp.path().join("workspace"); + let jpath = tmp.path().join("jpath"); + fs::create_dir_all(&workspace).expect("workspace should be created"); + fs::create_dir_all(&jpath).expect("jpath should be created"); + + let doc_path = workspace.join("main.jsonnet"); + fs::write(&doc_path, r#"import "sh""#).expect("main file should be created"); + fs::write(workspace.join("local.libsonnet"), "{}").expect("local import should be created"); + fs::write(jpath.join("shared.libsonnet"), "{}").expect("jpath import should be created"); + + let source = r#"import "sh"#; + let cursor_offset = u32::try_from(source.len()).expect("test source should fit in u32"); + let items = check_import_completion(source, cursor_offset, Some(&doc_path), &[jpath]) + .expect("should have import completions"); + + let labels: Vec<_> = items.iter().map(|item| item.label.as_str()).collect(); + assert_eq!(labels, vec!["shared.libsonnet"]); + } + + #[test] + fn test_import_completion_dedupes_across_roots() { + let tmp = TempDir::new().expect("temp dir should be created"); + let workspace = tmp.path().join("workspace"); + let jpath = tmp.path().join("jpath"); + fs::create_dir_all(&workspace).expect("workspace should be created"); + fs::create_dir_all(&jpath).expect("jpath should be created"); + + let doc_path = workspace.join("main.jsonnet"); + fs::write(&doc_path, r#"import """#).expect("main file should be created"); + fs::write(workspace.join("dup.libsonnet"), "{}") + .expect("workspace import should be created"); + fs::write(jpath.join("dup.libsonnet"), "{}").expect("jpath import should be created"); + + let source = r#"import ""#; + let cursor_offset = u32::try_from(source.len()).expect("test source should fit in u32"); + let items = check_import_completion(source, cursor_offset, Some(&doc_path), 
&[jpath]) + .expect("should have import completions"); + + let dup_count = items + .iter() + .filter(|item| item.label == "dup.libsonnet") + .count(); + assert_eq!(dup_count, 1, "duplicate entries should be deduped"); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/locals.rs b/crates/jrsonnet-lsp-handlers/src/completion/locals.rs new file mode 100644 index 00000000..2b3b371b --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/locals.rs @@ -0,0 +1,50 @@ +//! Local variable completions. + +use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_lsp_inference::SemanticArtifacts; +use lsp_types::{CompletionItem, CompletionItemKind}; + +use super::helpers::get_identifier_prefix; +use crate::definition::{collect_visible_bindings_with_semantic, BindingKind}; + +/// Get completions for local variables in scope using semantic artifacts when available. +pub fn get_local_completions_with_semantic( + document: &Document, + position: LspPosition, + text: &str, + offset: u32, + semantic: Option<&SemanticArtifacts>, +) -> Vec { + // Get the prefix the user is typing (if any) + let prefix = get_identifier_prefix(text, offset as usize); + + // Collect all visible bindings + let bindings = collect_visible_bindings_with_semantic(document, position, semantic); + + bindings + .into_iter() + .filter(|b| prefix.is_empty() || b.name.starts_with(prefix)) + .map(|binding| { + let kind = match binding.kind { + BindingKind::LocalFunction => CompletionItemKind::FUNCTION, + BindingKind::LocalVariable | BindingKind::Parameter | BindingKind::ForVariable => { + CompletionItemKind::VARIABLE + } + }; + + let detail = match binding.kind { + BindingKind::LocalVariable => "local variable", + BindingKind::LocalFunction => "local function", + BindingKind::Parameter => "parameter", + BindingKind::ForVariable => "for variable", + }; + + CompletionItem { + label: binding.name, + kind: Some(kind), + detail: Some(detail.to_string()), + ..Default::default() + } + }) + 
.collect() +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs new file mode 100644 index 00000000..901402be --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs @@ -0,0 +1,19 @@ +//! Completion handler for providing code completions. +//! +//! Supports: +//! - Standard library functions (triggered by `std.`) +//! - Local variables in scope +//! - Object field completion (triggered by `obj.`) +//! - Import path completion (inside import strings) + +mod fields; +mod handler; +mod helpers; +mod imports; +mod locals; +mod stdlib; + +pub use handler::{ + completion, completion_with_import_roots, completion_with_import_roots_and_semantic, +}; +pub use imports::find_import_string_start; diff --git a/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs b/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs new file mode 100644 index 00000000..4d984dba --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs @@ -0,0 +1,58 @@ +//! Standard library completions for `std.` prefix. + +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; +use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_rowan_parser::{nodes::ExprField, AstNode, SyntaxKind, SyntaxToken}; +use lsp_types::{CompletionItem, CompletionItemKind, Documentation, MarkupContent, MarkupKind}; + +/// Check if we should provide stdlib completions. +pub fn check_stdlib_completion( + token: &SyntaxToken, + _text: &str, + _offset: u32, +) -> Option> { + // Cursor is right after the dot in `x.|`. + if token.kind() == SyntaxKind::DOT { + let parent = token.parent()?; + let field = parent.ancestors().find_map(ExprField::cast)?; + let base = field.base()?; + if expr_resolves_to_builtin_std(&base) { + return Some(get_stdlib_completions("")); + } + return None; + } + + // Cursor is inside the field name in `x.fo|`. 
+ if token.kind() == SyntaxKind::IDENT { + let prev = token.prev_token()?; + if prev.kind() != SyntaxKind::DOT { + return None; + } + let parent = token.parent()?; + let field = parent.ancestors().find_map(ExprField::cast)?; + let base = field.base()?; + if expr_resolves_to_builtin_std(&base) { + return Some(get_stdlib_completions(token.text())); + } + } + + None +} + +/// Get stdlib completion items, filtered by prefix. +pub fn get_stdlib_completions(prefix: &str) -> Vec { + stdlib::get_all_stdlib_docs() + .filter(|doc| doc.name.starts_with(prefix)) + .map(|doc| CompletionItem { + label: doc.name.to_string(), + kind: Some(CompletionItemKind::FUNCTION), + detail: Some(format!("std.{}{}", doc.name, doc.signature)), + documentation: Some(Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: doc.description.to_string(), + })), + insert_text: Some(doc.name.to_string()), + ..Default::default() + }) + .collect() +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition/field_path.rs b/crates/jrsonnet-lsp-handlers/src/definition/field_path.rs new file mode 100644 index 00000000..906d87d7 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/field_path.rs @@ -0,0 +1,155 @@ +use jrsonnet_lsp_document::Document; +use jrsonnet_rowan_parser::{ + nodes::{ExprBase, ExprField}, + AstNode, SyntaxToken, +}; +use tracing::debug; + +use super::{import::resolve_base_to_import, DefinitionResult}; + +/// Check if the token is a field name in a field access expression where the base +/// is defined as an import. For example, `foo` in `lib.foo` where `lib = import "..."`. +/// +/// Returns `DefinitionResult::ImportField` if this is a field access on an import. 
+pub(super) fn check_field_access_on_import( + token: &SyntaxToken, + _field_name: &str, + document: &Document, +) -> Option { + // Check if we're inside an ExprField (field access like .foo) + let expr_field = token.parent_ancestors().find_map(ExprField::cast)?; + + // Verify this token is the field name in the ExprField + let field_name_node = expr_field.field()?; + if field_name_node.syntax().text_range() != token.parent()?.text_range() { + return None; + } + + // Build the field chain by walking up the ExprField chain + // We collect fields from innermost to outermost, then reverse + let mut fields = Vec::new(); + let mut current_field = expr_field; + + // Add the current field name + if let Some(name) = current_field.field() { + if let Some(ident) = name.ident_lit() { + fields.push(ident.text().to_string()); + } + } + + // Walk up the ExprField chain (from outermost to innermost in the code) + // For `lib.foo.bar`, the current_field starts at `.bar`, its base is ExprField `.foo` + let base = loop { + // Get the base of the current field access + let base_expr = current_field.base()?; + + match base_expr.expr_base()? { + ExprBase::ExprField(inner_field) => { + // Another field access - collect its field name and continue + if let Some(name) = inner_field.field() { + if let Some(ident) = name.ident_lit() { + fields.push(ident.text().to_string()); + } + } + current_field = inner_field; + } + other => { + // Found the base (could be ExprVar, ExprImport, etc.) 
+ break other; + } + } + }; + + // Reverse to get fields in order from base outward + fields.reverse(); + + // Now check if the base resolves to an import + let (import_path, mut base_fields) = resolve_base_to_import(&base, document)?; + + // Combine base fields (from tracing through bindings) with field chain + base_fields.extend(fields); + + debug!( + import_path = %import_path, + fields = ?base_fields, + "found import field definition" + ); + + Some(DefinitionResult::ImportField { + path: import_path, + fields: base_fields, + }) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::Range; + + use super::super::{goto_declaration, goto_definition, DefinitionResult}; + + fn expect_import_field( + result: Option, + expected_path: &str, + expected_fields: &[&str], + ) { + match result { + Some(DefinitionResult::ImportField { path, fields }) => { + assert_eq!(path, expected_path); + assert_eq!(fields, expected_fields); + } + other => panic!("Expected ImportField definition, got {other:?}"), + } + } + + fn expect_local(result: Option) -> Range { + match result { + Some(DefinitionResult::Local(range)) => range, + other => panic!("Expected Local definition, got {other:?}"), + } + } + + #[test] + fn test_field_access_on_import() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 40).into()); + expect_import_field(result, "lib.libsonnet", &["foo"]); + } + + #[test] + fn test_nested_field_access_on_import() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo.bar"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 44).into()); + expect_import_field(result, "lib.libsonnet", &["foo", "bar"]); + } + + #[test] + fn test_definition_resolves_alias_to_import_field() { + let code = r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; 
+alias"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let definition = goto_definition(&doc, (2, 1).into()); + expect_import_field(definition, "lib.libsonnet", &["foo"]); + + let declaration = expect_local(goto_declaration(&doc, (2, 1).into())); + assert_eq!( + declaration, + Range { + start: lsp_types::Position { + line: 1, + character: 6, + }, + end: lsp_types::Position { + line: 1, + character: 11, + }, + } + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition/import.rs b/crates/jrsonnet-lsp-handlers/src/definition/import.rs new file mode 100644 index 00000000..f69754c4 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/import.rs @@ -0,0 +1,72 @@ +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_import::extract_import_path; +use jrsonnet_lsp_inference::{trace_base, trace_expr, ConstEvalResult}; +use jrsonnet_rowan_parser::nodes::{Expr, ExprBase}; + +pub(super) fn resolve_expr_to_import( + value_expr: &Expr, + document: &Document, +) -> Option<(String, Vec)> { + if let ExprBase::ExprImport(import) = value_expr.expr_base()? { + return Some((extract_import_path(&import)?, Vec::new())); + } + + match trace_expr(value_expr, document)? { + ConstEvalResult::Import { path, fields } => Some((path, fields)), + ConstEvalResult::Std { .. } | ConstEvalResult::Local { .. } => None, + } +} + +/// Try to resolve the base expression to an import path and any additional fields. +/// +/// This handles cases like: +/// - `lib.foo` where `lib = import "lib.libsonnet"` +/// - Direct import: `(import "lib.libsonnet").foo` +/// - Chained bindings: `local x = import "..."; local y = x; y.foo` +/// +/// Returns the import path and any fields that were traced through the base. +pub(super) fn resolve_base_to_import( + base: &ExprBase, + document: &Document, +) -> Option<(String, Vec)> { + // Use const_eval to trace through bindings + match trace_base(base, document)? 
{ + ConstEvalResult::Import { path, fields } => Some((path, fields)), + ConstEvalResult::Std { .. } | ConstEvalResult::Local { .. } => None, + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + + use super::super::{goto_definition, DefinitionResult}; + + #[test] + fn test_import_definition() { + let code = r#"import "lib/utils.libsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 8).into()); + match result { + Some(DefinitionResult::Import(path)) => { + assert_eq!(path, "lib/utils.libsonnet"); + } + other => panic!("Expected Import definition, got {other:?}"), + } + } + + #[test] + fn test_importstr_definition() { + let code = r#"importstr "data/config.txt""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 12).into()); + match result { + Some(DefinitionResult::Import(path)) => { + assert_eq!(path, "data/config.txt"); + } + other => panic!("Expected Import definition, got {other:?}"), + } + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition/local.rs b/crates/jrsonnet-lsp-handlers/src/definition/local.rs new file mode 100644 index 00000000..66cee3cf --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/local.rs @@ -0,0 +1,188 @@ +use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document}; +use jrsonnet_lsp_scope::find_definition_range; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, Expr, ExprBase}, + AstNode, SyntaxKind, +}; +use rowan::TextRange; + +pub(super) fn find_bind_by_definition_range(document: &Document, range: TextRange) -> Option { + let ast = document.ast(); + let root = ast.syntax(); + let offset = ByteOffset::from(u32::from(range.start())); + + if let Some(token) = token_at_offset(root, offset) { + if token.kind() == SyntaxKind::IDENT { + if let Some(bind) = token + .parent() + .and_then(|node| node.ancestors().find_map(Bind::cast)) + { + if 
bind_definition_range(&bind) + .is_some_and(|definition_range| definition_range == range) + { + return Some(bind); + } + } + } + } + + root.descendants().filter_map(Bind::cast).find(|bind| { + bind_definition_range(bind).is_some_and(|definition_range| definition_range == range) + }) +} + +fn bind_definition_range(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind) => { + let destruct = bind.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) + } + Bind::BindFunction(bind) => Some(bind.name()?.syntax().text_range()), + } +} + +pub(super) fn bind_value_expr(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind) => bind.value(), + Bind::BindFunction(bind) => bind.value(), + } +} + +pub(super) fn aliased_definition_range(value_expr: &Expr) -> Option { + let ExprBase::ExprVar(var) = value_expr.expr_base()? else { + return None; + }; + let ident = var.name()?.ident_lit()?; + find_definition_range(&ident, ident.text()) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::Range; + + use super::super::{goto_declaration, goto_definition, DefinitionResult}; + + fn expect_local(result: Option) -> Range { + match result { + Some(DefinitionResult::Local(range)) => range, + other => panic!("Expected Local definition, got {other:?}"), + } + } + + #[test] + fn test_local_variable_definition() { + let code = r"local x = 1; x + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 13).into()); + let range = expect_local(result); + assert_eq!(range.start.line, 0); + assert_eq!(range.start.character, 6); + } + + #[test] + fn test_function_parameter_definition() { + let code = r"local f(x) = x * 2; f(3)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 13).into()); + let range = expect_local(result); + 
assert_eq!(range.start.line, 0); + assert_eq!(range.start.character, 8); + } + + #[test] + fn test_nested_local_shadowing() { + let code = r"local x = 1; local x = 2; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 26).into()); + let range = expect_local(result); + assert_eq!(range.start.character, 19); + } + + #[test] + fn test_no_definition_for_undefined() { + let code = r"local x = 1; y + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 13).into()); + assert!( + result.is_none(), + "Should not find definition for undefined y" + ); + } + + #[test] + fn test_for_comprehension_variable() { + let code = r"[x * 2 for x in [1, 2, 3]]"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 1).into()); + let range = expect_local(result); + assert_eq!(range.start.line, 0); + assert_eq!(range.start.character, 11); + } + + #[test] + fn test_object_local_binding() { + let code = r"{ local helper = 1, result: helper }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 28).into()); + let range = expect_local(result); + assert_eq!(range.start.line, 0); + assert_eq!(range.start.character, 8); + } + + #[test] + fn test_multiline_local_definition() { + let code = "local\n x\n =\n 1;\nx"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (4, 0).into()); + let range = expect_local(result); + assert_eq!(range.start.line, 1); + assert_eq!(range.start.character, 2); + } + + #[test] + fn test_definition_follows_local_alias_chain() { + let code = "local x = 1;\nlocal y = x;\ny"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let definition = expect_local(goto_definition(&doc, (2, 0).into())); + let declaration = expect_local(goto_declaration(&doc, (2, 0).into())); + + 
assert_eq!( + definition, + Range { + start: lsp_types::Position { + line: 0, + character: 6, + }, + end: lsp_types::Position { + line: 0, + character: 7, + }, + } + ); + assert_eq!( + declaration, + Range { + start: lsp_types::Position { + line: 1, + character: 6, + }, + end: lsp_types::Position { + line: 1, + character: 7, + }, + } + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition/mod.rs b/crates/jrsonnet-lsp-handlers/src/definition/mod.rs new file mode 100644 index 00000000..3272c7e5 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/mod.rs @@ -0,0 +1,221 @@ +//! Go-to-definition handler. +//! +//! Finds the definition of a symbol at a given position by: +//! 1. Finding the token at the cursor position +//! 2. If it's a variable reference, walking up the scope chain to find the binding +//! 3. If it's an import path, returning the import path for resolution +//! 4. If it's a field access on an import, returning the import path and field chain + +mod field_path; +mod import; +mod local; +mod symbol; + +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document, LspPosition}; +use jrsonnet_lsp_import::check_import_path; +use jrsonnet_lsp_inference::{SemanticArtifacts, SemanticImportTarget}; +use jrsonnet_lsp_scope::{find_definition_range, is_variable_reference}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::Range; +use rowan::TextRange; +use tracing::debug; + +use self::{ + field_path::check_field_access_on_import, + import::resolve_expr_to_import, + local::{aliased_definition_range, bind_value_expr, find_bind_by_definition_range}, +}; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum DefinitionMode { + Declaration, + Definition, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +enum CanonicalDefinition { + Local(TextRange), + Import(String), + ImportField { path: String, fields: Vec }, +} + +/// Result of a go-to-definition request. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub enum DefinitionResult { + /// A local definition within the same document. + Local(Range), + /// An import path that needs to be resolved relative to the document. + Import(String), + /// A field in an imported file. Contains the import path and the field chain. + /// For `lib.foo.bar`, this would be `("lib.libsonnet", ["foo", "bar"])`. + ImportField { + /// The import path to resolve. + path: String, + /// The chain of field names to navigate. + fields: Vec, + }, +} + +/// Find the definition of the symbol at the given position. +/// +/// Returns a `DefinitionResult` indicating either: +/// - A local range within the document +/// - An import path that needs to be resolved by the caller +/// - A field in an imported file (path + field chain) +pub fn goto_definition(document: &Document, position: LspPosition) -> Option { + goto_with_mode(document, position, DefinitionMode::Definition, None) +} + +/// Find the declaration site of the symbol at the given position. +/// +/// Unlike `goto_definition`, this does not follow local alias chains. +pub fn goto_declaration(document: &Document, position: LspPosition) -> Option { + goto_with_mode(document, position, DefinitionMode::Declaration, None) +} + +/// Find the definition of the symbol at the given position using semantic artifacts when available. +pub fn goto_definition_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Option { + goto_with_mode(document, position, DefinitionMode::Definition, semantic) +} + +/// Find the declaration site of the symbol at the given position using semantic artifacts when available. 
+pub fn goto_declaration_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Option { + goto_with_mode(document, position, DefinitionMode::Declaration, semantic) +} + +fn goto_with_mode( + document: &Document, + position: LspPosition, + mode: DefinitionMode, + semantic: Option<&SemanticArtifacts>, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let offset = line_index.offset(position, text)?; + + // Get the AST root + let ast = document.ast(); + + // Find the token at the offset + let token = token_at_offset(ast.syntax(), offset)?; + + // Check for import path first + if let Some(import_path) = check_import_path(&token) { + debug!(import_path = %import_path, "found import definition"); + return Some(DefinitionResult::Import(import_path)); + } + + // Check if this is an identifier that could be a variable reference + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text().to_string(); + + // Check if this identifier is a field access (e.g., `foo` in `lib.foo`) + // and if the base resolves to an import + if let Some(result) = check_field_access_on_import(&token, &name, document) { + return Some(result); + } + + // Check if this identifier is part of a variable reference (ExprVar) + if !is_variable_reference(&token) { + return None; + } + + // Walk up the scope chain to find the definition + let def_range = semantic + .and_then(|artifacts| artifacts.definition_for_ident_token(&token)) + .or_else(|| find_definition_range(&token, &name))?; + + let result = if mode == DefinitionMode::Definition { + match resolve_canonical_definition(document, def_range, semantic) { + CanonicalDefinition::Local(range) => { + DefinitionResult::Local(to_lsp_range(range, line_index, text)) + } + CanonicalDefinition::Import(path) => DefinitionResult::Import(path), + CanonicalDefinition::ImportField { path, fields } => { + 
DefinitionResult::ImportField { path, fields } + } + } + } else { + DefinitionResult::Local(to_lsp_range(def_range, line_index, text)) + }; + + debug!(name = %name, mode = ?mode, ?result, "resolved symbol target"); + Some(result) +} + +fn resolve_canonical_definition( + document: &Document, + initial_def: TextRange, + semantic: Option<&SemanticArtifacts>, +) -> CanonicalDefinition { + let mut visited = std::collections::HashSet::new(); + let mut current = initial_def; + + loop { + if !visited.insert(current) { + return CanonicalDefinition::Local(current); + } + + if let Some(artifacts) = semantic { + if let Some(binding) = artifacts.binding_info(current) { + if let Some(import_target) = &binding.import_target { + return match import_target { + SemanticImportTarget::Import { path } => { + CanonicalDefinition::Import(path.clone()) + } + SemanticImportTarget::ImportField { path, fields } => { + CanonicalDefinition::ImportField { + path: path.clone(), + fields: fields.clone(), + } + } + }; + } + + if let Some(next) = binding.alias_definition { + current = next; + continue; + } + + return CanonicalDefinition::Local(current); + } + } + + let Some(bind) = find_bind_by_definition_range(document, current) else { + return CanonicalDefinition::Local(current); + }; + let Some(value_expr) = bind_value_expr(&bind) else { + return CanonicalDefinition::Local(current); + }; + + if let Some((path, fields)) = resolve_expr_to_import(&value_expr, document) { + return if fields.is_empty() { + CanonicalDefinition::Import(path) + } else { + CanonicalDefinition::ImportField { path, fields } + }; + } + + let Some(next) = aliased_definition_range(&value_expr) else { + return CanonicalDefinition::Local(current); + }; + current = next; + } +} + +pub use symbol::{ + collect_visible_bindings, collect_visible_bindings_with_semantic, BindingKind, VisibleBinding, +}; diff --git a/crates/jrsonnet-lsp-handlers/src/definition/symbol.rs b/crates/jrsonnet-lsp-handlers/src/definition/symbol.rs new file 
mode 100644 index 00000000..318c5368 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/symbol.rs @@ -0,0 +1,393 @@ +use jrsonnet_lsp_document::{ + find_node_at_offset, token_at_offset, ByteOffset, Document, LspPosition, +}; +use jrsonnet_lsp_inference::{SemanticArtifacts, SemanticBindingKind}; +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindFunction, Destruct, ExprFunction, ForSpec, MemberBindStmt, Param, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, +}; +use rowan::TextRange; + +/// A binding visible at a given position. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct VisibleBinding { + /// The name of the binding. + pub name: String, + /// The kind of binding. + pub kind: BindingKind, + /// The text range of the definition. + pub range: TextRange, +} + +/// The kind of a binding. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum BindingKind { + /// A local variable binding (local x = ...). + LocalVariable, + /// A function binding (local f(x) = ...). + LocalFunction, + /// A function parameter. + Parameter, + /// A for-comprehension variable. + ForVariable, +} + +/// Collect all visible bindings at the given byte offset. +#[must_use] +pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> Vec { + collect_visible_bindings_with_semantic(document, position, None) +} + +/// Collect all visible bindings at the given byte offset using semantic artifacts when available. 
+#[must_use] +pub fn collect_visible_bindings_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + if let Some(artifacts) = semantic { + return artifacts + .visible_bindings_at(offset.into()) + .into_iter() + .map(|binding| VisibleBinding { + name: binding.name, + range: binding.range, + kind: match binding.kind { + SemanticBindingKind::LocalVariable => BindingKind::LocalVariable, + SemanticBindingKind::LocalFunction => BindingKind::LocalFunction, + SemanticBindingKind::Parameter => BindingKind::Parameter, + SemanticBindingKind::ForVariable => BindingKind::ForVariable, + }, + }) + .collect(); + } + + // Get the AST root + let ast = document.ast(); + let root = ast.syntax(); + + let Some(mut current) = start_node_for_offset(root, offset) else { + return Vec::new(); + }; + + // Walk up the scope chain and collect all bindings + let mut bindings = Vec::new(); + let mut seen_names = std::collections::HashSet::new(); + + // First collect from current node if it's a scope + if is_scope_node(¤t) { + // Create a dummy child at the end for scope checking + collect_scope_bindings_at_offset( + ¤t, + u32::from(offset), + &mut bindings, + &mut seen_names, + ); + } + + while let Some(parent) = current.parent() { + collect_scope_bindings_at_offset( + &parent, + u32::from(offset), + &mut bindings, + &mut seen_names, + ); + current = parent; + } + + bindings +} + +fn start_node_for_offset(root: &SyntaxNode, offset: ByteOffset) -> Option { + token_at_offset(root, offset) + .and_then(|token| token.parent()) + .or_else(|| { + let offset_u32 = u32::from(offset); + if offset_u32 == 0 { + return None; + } + token_at_offset(root, ByteOffset::from(offset_u32 - 1)).and_then(|token| token.parent()) + }) + .or_else(|| 
find_node_at_offset(root, offset)) +} + +/// Check if a node is a scope-introducing node. +fn is_scope_node(node: &SyntaxNode) -> bool { + matches!( + node.kind(), + SyntaxKind::EXPR + | SyntaxKind::EXPR_FUNCTION + | SyntaxKind::BIND_FUNCTION + | SyntaxKind::FOR_SPEC + | SyntaxKind::OBJ_BODY_MEMBER_LIST + | SyntaxKind::EXPR_ARRAY_COMP + | SyntaxKind::OBJ_BODY_COMP + ) +} + +/// Collect bindings from a scope using offset for visibility checking. +fn collect_scope_bindings_at_offset( + scope: &SyntaxNode, + offset: u32, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + match scope.kind() { + SyntaxKind::EXPR => { + collect_expr_bindings_at_offset(scope, offset, bindings, seen); + } + SyntaxKind::EXPR_FUNCTION => { + collect_function_params(scope, bindings, seen); + } + SyntaxKind::BIND_FUNCTION => { + collect_bind_function_params(scope, bindings, seen); + } + SyntaxKind::FOR_SPEC => { + collect_for_binding(scope, bindings, seen); + } + SyntaxKind::OBJ_BODY_MEMBER_LIST => { + collect_object_locals(scope, bindings, seen); + } + SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => { + collect_comprehension_bindings(scope, bindings, seen); + } + _ => {} + } +} + +/// Collect local bindings from an Expr using offset for visibility. +fn collect_expr_bindings_at_offset( + expr: &SyntaxNode, + offset: u32, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let text_size = rowan::TextSize::from(offset); + + for stmt_node in expr.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + // Only consider bindings that appear before our position + if stmt_node.text_range().end() > text_size { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let Some(binding) = extract_binding(&bind) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } + } + } + } +} + +/// Extract a binding from a Bind node. 
+fn extract_binding(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bd) => { + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + return Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::LocalVariable, + range: bind_name.syntax().text_range(), + }); + } + None + } + Bind::BindFunction(bf) => { + let bind_name = bf.name()?; + let ident = bind_name.ident_lit()?; + Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::LocalFunction, + range: bind_name.syntax().text_range(), + }) + } + } +} + +/// Collect function parameters. +fn collect_function_params( + func_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(func) = ExprFunction::cast(func_node.clone()) else { + return; + }; + let Some(params) = func.params_desc() else { + return; + }; + + for param in params.params() { + if let Some(binding) = extract_param_binding(¶m) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } +} + +/// Collect `BindFunction` parameters. +fn collect_bind_function_params( + func_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(func) = BindFunction::cast(func_node.clone()) else { + return; + }; + let Some(params) = func.params() else { + return; + }; + + for param in params.params() { + if let Some(binding) = extract_param_binding(¶m) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } +} + +/// Extract a binding from a Param node. 
+fn extract_param_binding(param: &Param) -> Option { + let destruct = param.destruct()?; + if let Destruct::DestructFull(full) = destruct { + let param_name = full.name()?; + let ident = param_name.ident_lit()?; + return Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::Parameter, + range: param_name.syntax().text_range(), + }); + } + None +} + +/// Collect for-comprehension binding. +fn collect_for_binding( + for_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(for_spec) = ForSpec::cast(for_node.clone()) else { + return; + }; + let Some(destruct) = for_spec.bind() else { + return; + }; + + if let Destruct::DestructFull(full) = destruct { + if let Some(bind_name) = full.name() { + if let Some(ident) = bind_name.ident_lit() { + let name = ident.text().to_string(); + if !seen.contains(&name) { + seen.insert(name.clone()); + bindings.push(VisibleBinding { + name, + kind: BindingKind::ForVariable, + range: bind_name.syntax().text_range(), + }); + } + } + } + } +} + +/// Collect object local bindings. +fn collect_object_locals( + obj_body: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + for member_node in obj_body.children() { + if member_node.kind() != SyntaxKind::MEMBER_BIND_STMT { + continue; + } + let Some(member_bind) = MemberBindStmt::cast(member_node) else { + continue; + }; + let Some(obj_local) = member_bind.obj_local() else { + continue; + }; + let Some(bind) = obj_local.bind() else { + continue; + }; + let Some(binding) = extract_binding(&bind) else { + continue; + }; + if seen.contains(&binding.name) { + continue; + } + seen.insert(binding.name.clone()); + bindings.push(binding); + } +} + +/// Collect bindings from comprehension `FOR_SPEC` children. 
+fn collect_comprehension_bindings( + comp_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + for child in comp_node.children() { + if child.kind() != SyntaxKind::FOR_SPEC { + continue; + } + collect_for_binding(&child, bindings, seen); + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use rowan::TextRange; + + use super::super::{collect_visible_bindings, BindingKind, VisibleBinding}; + + #[test] + fn test_collect_visible_bindings_at_eof() { + let code = "local x = 1;\n"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + assert_eq!( + collect_visible_bindings(&doc, (1, 0).into()), + vec![VisibleBinding { + name: "x".to_string(), + kind: BindingKind::LocalVariable, + range: TextRange::new(6.into(), 7.into()), + }] + ); + } + + #[test] + fn test_collect_visible_bindings_out_of_bounds_position() { + let code = "local x = 1;"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + assert_eq!( + collect_visible_bindings(&doc, (9, 0).into()), + Vec::::new() + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/document_highlight.rs b/crates/jrsonnet-lsp-handlers/src/document_highlight.rs new file mode 100644 index 00000000..83db23e5 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/document_highlight.rs @@ -0,0 +1,102 @@ +//! Document highlight handler. +//! +//! Highlights all occurrences of the symbol under cursor in the current file. + +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document, LspPosition}; +use jrsonnet_lsp_scope::{ + find_definition_range, is_definition_site, is_variable_reference, ScopeResolver, +}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{DocumentHighlight, DocumentHighlightKind}; + +/// Find document highlights for the symbol at the given position. 
+#[must_use] +pub fn document_highlights(document: &Document, position: LspPosition) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + let ast = document.ast(); + let Some(token) = token_at_offset(ast.syntax(), offset) else { + return Vec::new(); + }; + + if token.kind() != SyntaxKind::IDENT { + return Vec::new(); + } + + let name = token.text(); + let definition_range = if is_definition_site(&token) { + token.parent().map(|p| p.text_range()) + } else if is_variable_reference(&token) { + find_definition_range(&token, name) + } else { + None + }; + let Some(def_range) = definition_range else { + return Vec::new(); + }; + + let resolver = ScopeResolver::new(ast.syntax()); + resolver + .find_references(ast.syntax(), name, def_range) + .into_iter() + .map(|range| DocumentHighlight { + range: to_lsp_range(range, line_index, text), + kind: Some(if range == def_range { + DocumentHighlightKind::WRITE + } else { + DocumentHighlightKind::READ + }), + }) + .collect() +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + + use super::*; + + #[test] + fn test_document_highlights_local_variable() { + let code = "local x = 1; x + x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 13).into(); + + let mut highlights = document_highlights(&doc, pos); + highlights.sort_by_key(|h| h.range.start.character); + + assert_eq!(highlights.len(), 3); + assert_eq!(highlights[0].range.start.character, 6); + assert_eq!(highlights[0].kind, Some(DocumentHighlightKind::WRITE)); + assert_eq!(highlights[1].kind, Some(DocumentHighlightKind::READ)); + assert_eq!(highlights[2].kind, Some(DocumentHighlightKind::READ)); + } + + #[test] + fn test_document_highlights_non_identifier() { + let code = "local x = 1; x + x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 15).into(); + + 
assert!(document_highlights(&doc, pos).is_empty()); + } + + #[test] + fn test_document_highlights_respects_scope() { + let code = "local x = 1; (local x = 2; x) + x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 6).into(); + + let mut starts: Vec = document_highlights(&doc, pos) + .into_iter() + .map(|h| h.range.start.character) + .collect(); + starts.sort_unstable(); + + assert_eq!(starts, vec![6, 32]); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs new file mode 100644 index 00000000..1d9cb257 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs @@ -0,0 +1,171 @@ +use jrsonnet_fmt::format_code_range; +use jrsonnet_lsp_document::{ByteOffset, LineIndex, LspRange}; +use lsp_types::{Position, Range, TextEdit}; +use tracing::debug; + +use super::{engine::run_formatter, FormattingConfig}; + +fn full_document_range(text: &str) -> Range { + let line_index = LineIndex::new(text); + let end = line_index + .position(ByteOffset::from(text.len()), text) + .unwrap_or_default(); + + Range { + start: Position { + line: 0, + character: 0, + }, + end: end.into(), + } +} + +fn try_format(text: &str, config: &FormattingConfig) -> Option { + match run_formatter(text, config) { + Ok(formatted) => Some(formatted), + Err(err) => { + debug!("Formatting unavailable: {err}"); + None + } + } +} + +/// Format a Jsonnet document with default configuration. +/// +/// Returns a list of text edits to apply to the document. +/// On error, returns None. +#[must_use] +pub fn format_document(text: &str) -> Option> { + format_document_with_config(text, &FormattingConfig::default()) +} + +/// Format a Jsonnet document with the given configuration. +/// +/// Returns a list of text edits to apply to the document. +/// On error, returns None. 
+#[must_use] +pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Option> { + let formatted = try_format(text, config)?; + + if formatted == text { + // No changes needed + return Some(Vec::new()); + } + + // Return a single edit that replaces the entire document. + Some(vec![TextEdit { + range: full_document_range(text), + new_text: formatted, + }]) +} + +/// Format a Jsonnet document range with default configuration. +/// +/// Returns a list of text edits constrained to the requested range. +/// On error, returns None. +#[must_use] +pub fn format_document_range(text: &str, range: Range) -> Option> { + format_document_range_with_config(text, range, &FormattingConfig::default()) +} + +/// Format a Jsonnet document range with the given configuration. +/// +/// Returns range edits only when formatter changes are fully inside `range`. +/// On error, returns None. +#[must_use] +pub fn format_document_range_with_config( + text: &str, + range: Range, + config: &FormattingConfig, +) -> Option> { + let line_index = LineIndex::new(text); + let requested = line_index.text_range(LspRange::from(range), text)?; + let requested_range = usize::from(ByteOffset::from(requested.start())) + ..usize::from(ByteOffset::from(requested.end())); + + let edits = if let Some(edits) = format_code_range(text, requested_range, config) { + edits + } else { + debug!("Formatting unavailable: formatter failed for range request"); + return None; + }; + + edits + .into_iter() + .map(|edit| { + let edit_range = rowan::TextRange::new( + ByteOffset::from(edit.range.start).into(), + ByteOffset::from(edit.range.end).into(), + ); + Some(TextEdit { + range: line_index.range(edit_range, text)?.into(), + new_text: edit.new_text, + }) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use lsp_types::{Position, Range}; + + use super::{format_document_range_with_config, FormattingConfig}; + + #[test] + fn test_range_formatting_applies_edit_within_requested_range() { + let text = "{\n a: 
1,\n b:2,\n}\n"; + let config = FormattingConfig { + indent: 2, + ..FormattingConfig::default() + }; + let range = Range { + start: Position { + line: 2, + character: 0, + }, + end: Position { + line: 2, + character: 6, + }, + }; + let edits = format_document_range_with_config(text, range, &config) + .expect("range formatting should succeed"); + assert_eq!(edits.len(), 1); + assert_eq!( + edits[0].range, + Range { + start: Position { + line: 2, + character: 4, + }, + end: Position { + line: 2, + character: 4, + }, + } + ); + assert_eq!(edits[0].new_text, " "); + } + + #[test] + fn test_range_formatting_returns_no_edits_when_changes_escape_requested_range() { + let text = "{\n a:1,\n b:2,\n}\n"; + let config = FormattingConfig { + indent: 2, + ..FormattingConfig::default() + }; + let range = Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + character: 6, + }, + }; + let edits = format_document_range_with_config(text, range, &config) + .expect("range formatting should succeed"); + assert!(edits.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs b/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs new file mode 100644 index 00000000..b924bf62 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs @@ -0,0 +1,98 @@ +use jrsonnet_fmt::format_code; +use thiserror::Error; + +use super::FormattingConfig; + +#[derive(Debug, Error)] +pub(super) enum FormatterError { + #[error("formatter failed")] + FormatFailed, +} + +pub(super) fn run_formatter( + input: &str, + config: &FormattingConfig, +) -> Result { + format_code(input, config).ok_or(FormatterError::FormatFailed) +} + +#[cfg(test)] +mod tests { + use lsp_types::{Position, Range, TextEdit}; + use rstest::rstest; + + use super::{super::dispatch::format_document_with_config, *}; + + const SIMPLE_OBJECT: &str = "{a:1}"; + const SIMPLE_OBJECT_WITH_TRAILING_NEWLINE: &str = "{a:1}\n"; + const STRING_OBJECT: &str = "{a:'x'}"; + 
const STRING_OBJECT_WITH_EMOJI: &str = "{a:'🦀'}"; + + fn full_replacement_edit(end: Position, new_text: String) -> Vec { + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end, + }, + new_text, + }] + } + + fn expected_end(text: &str) -> Position { + let line_index = jrsonnet_lsp_document::LineIndex::new(text); + line_index + .position(jrsonnet_lsp_document::ByteOffset::from(text.len()), text) + .map(Into::into) + .unwrap_or_default() + } + + #[test] + fn test_formats_document() { + assert_eq!( + format_document_with_config(SIMPLE_OBJECT, &FormattingConfig::default()), + Some(full_replacement_edit( + expected_end(SIMPLE_OBJECT), + "{\n\ta: 1,\n}\n".to_string() + )) + ); + } + + #[test] + fn test_respects_string_style_option() { + let config = FormattingConfig { + string_style: jrsonnet_fmt::StringStyle::Double, + ..FormattingConfig::default() + }; + assert_eq!( + format_document_with_config(STRING_OBJECT, &config), + Some(full_replacement_edit( + expected_end(STRING_OBJECT), + "{\n\ta: \"x\",\n}\n".to_string() + )) + ); + } + + #[test] + fn test_returns_none_on_parse_failure() { + assert_eq!( + format_document_with_config("local x = ", &FormattingConfig::default()), + None + ); + } + + #[rstest] + #[case(SIMPLE_OBJECT_WITH_TRAILING_NEWLINE, Position { line: 1, character: 0 })] + #[case(STRING_OBJECT_WITH_EMOJI, Position { line: 0, character: 8 })] + fn test_full_replacement_range_uses_lsp_positions( + #[case] input: &str, + #[case] expected_end: Position, + ) { + let edits = format_document_with_config(input, &FormattingConfig::default()) + .expect("formatting edit"); + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].range.end, expected_end); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs b/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs new file mode 100644 index 00000000..c858b1c5 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs @@ -0,0 +1,15 @@ +//! 
Document formatting handler. +//! +//! Formats Jsonnet code using an in-process formatter. + +mod dispatch; +mod engine; + +pub use dispatch::{ + format_document, format_document_range, format_document_range_with_config, + format_document_with_config, +}; +pub use jrsonnet_fmt::{ + CommentStyle as FormattingCommentStyle, FormatOptions as FormattingConfig, + StringStyle as FormattingStringStyle, +}; diff --git a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs new file mode 100644 index 00000000..c8cac4c6 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -0,0 +1,847 @@ +use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document, LspPosition}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_rowan_parser::{nodes::Trivia, AstNode, AstToken, SyntaxToken}; +use lsp_types::Hover; + +use super::{ + local::{check_local_hover, LocalHoverSections}, + model::{compose_hover_contents_with_policy, HoverDocBlock, HoverFacts, HoverTokenPurpose}, + preview_policy::select_preview_code, + stdlib::stdlib_hover_doc, + target::HoverTarget, + ImportFieldTypeResolver, +}; + +#[derive(Debug, Clone, PartialEq, Eq, Default)] +struct HoverDocsFacts { + has_stdlib_docs: bool, + has_token_docs: bool, + blocks: Vec, +} + +/// Get hover information for the given position. +/// +/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure +/// that import types are properly resolved. +pub fn hover(document: &Document, position: LspPosition, analysis: &TypeAnalysis) -> Option { + hover_with_import_field_type(document, position, analysis, None) +} + +/// Get hover information with an optional callback for imported field type lookup. +/// +/// When provided, `import_field_type_resolver` is used to improve hover precision for +/// `DefinitionResult::ImportField` targets by querying the imported document directly. 
+pub fn hover_with_import_field_type( + document: &Document, + position: LspPosition, + analysis: &TypeAnalysis, + import_field_type_resolver: Option<&ImportFieldTypeResolver<'_>>, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + let offset = line_index.offset(position, text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + // Never provide hover for trivia tokens. + if Trivia::cast(token.clone()).is_some() { + return None; + } + + let local_sections = check_local_hover( + document, + analysis, + position, + offset, + import_field_type_resolver, + ); + let local_kind = local_sections.as_ref().map(|sections| sections.kind); + let (local_type_markdown, context_markdown, definition_preview) = local_sections.map_or( + (None, None, None), + |LocalHoverSections { + kind: _, + type_markdown, + context_markdown, + preview_code, + }| (type_markdown, context_markdown, preview_code), + ); + let docs_facts = docs_facts_for_token(&token); + let expression_hover_allowed = token.kind().is_hover_eligible(); + if local_kind.is_none() + && !docs_facts.has_stdlib_docs + && !docs_facts.has_token_docs + && !expression_hover_allowed + { + return None; + } + let type_markdown = + local_type_markdown.or_else(|| inferred_type_markdown(document, analysis, offset)); + let preview_code = select_preview_code(&token, definition_preview); + let target = HoverTarget::classify( + local_kind, + docs_facts.has_stdlib_docs, + docs_facts.has_token_docs, + ); + let contents = compose_hover_contents_with_policy( + target.policy(), + HoverFacts { + type_markdown, + context_markdown, + docs: docs_facts.blocks, + preview_code, + }, + )?; + + Some(Hover { + contents, + range: None, + }) +} + +fn inferred_type_markdown( + document: &Document, + analysis: &TypeAnalysis, + offset: ByteOffset, +) -> Option { + let ast = document.ast(); + let ty = analysis.type_at_position(ast.syntax(), offset.into())?; + Some(format!("`{}`", 
analysis.display_for_hover(ty))) +} + +fn docs_facts_for_token(token: &SyntaxToken) -> HoverDocsFacts { + let mut docs_facts = HoverDocsFacts::default(); + + if let Some(doc) = stdlib_hover_doc(token) { + docs_facts.has_stdlib_docs = true; + docs_facts.blocks.push(HoverDocBlock::Stdlib { + name: doc.name.to_string(), + signature: doc.signature.to_string(), + description: doc.description.to_string(), + example: doc.example.map(ToString::to_string), + }); + } + + let token_purposes = if token.kind().is_hover_eligible() { + token.kind().token_doc_purposes() + } else { + &[] + }; + if !token_purposes.is_empty() { + docs_facts.has_token_docs = true; + docs_facts.blocks.push(HoverDocBlock::TokenPurposes( + token_purposes + .iter() + .map(|purpose| HoverTokenPurpose { + doc: purpose.doc.to_string(), + example: purpose.example.to_string(), + }) + .collect(), + )); + } + + docs_facts +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use assert_matches::assert_matches; + use indoc::indoc; + use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_inference::ImportResolver; + use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, FunctionData, GlobalTy, GlobalTyStore, ObjectData, + ParamInterned, ReturnSpec, Ty, TyData, + }; + use lsp_types::{HoverContents, MarkedString}; + use rstest::rstest; + + use super::*; + + fn get_hover(code: &str, line: u32, character: u32) -> Option { + let global_types = Arc::new(GlobalTyStore::new()); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze_with_global(&doc, global_types); + let pos = (line, character).into(); + hover(&doc, pos, &analysis) + } + + fn marked_string_as_markdown(marked: &MarkedString) -> String { + match marked { + MarkedString::String(value) => value.clone(), + MarkedString::LanguageString(language) => { + format!("```{}\n{}\n```", language.language, language.value) + } + } + } + + fn hover_contents_as_sections(contents: &HoverContents) -> Option> { + match 
contents { + HoverContents::Array(items) => Some( + items + .iter() + .map(marked_string_as_markdown) + .collect::>(), + ), + HoverContents::Markup(_) | HoverContents::Scalar(_) => None, + } + } + + fn assert_hover_contents(contents: &HoverContents, expected_sections: &[&str]) { + let actual_sections = + hover_contents_as_sections(contents).expect("hover contents should be array"); + let expected_sections = expected_sections + .iter() + .map(|section| (*section).to_string()) + .collect::>(); + assert_eq!(actual_sections, expected_sections); + } + + #[derive(Debug)] + struct StaticImportResolver { + path: &'static str, + ty: GlobalTy, + } + + impl ImportResolver for StaticImportResolver { + fn resolve_import(&self, import_path: &str) -> Option { + (import_path == self.path).then_some(self.ty) + } + } + + fn function_type_x_to_number(global_types: &Arc) -> Ty { + global_types.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + })) + } + + fn closed_object_type(global_types: &Arc, fields: Vec<(&str, Ty)>) -> Ty { + let mut object_fields: Vec<(String, FieldDefInterned)> = fields + .into_iter() + .map(|(name, ty)| { + ( + name.to_string(), + FieldDefInterned { + ty, + required: true, + visibility: FieldVis::Normal, + }, + ) + }) + .collect(); + object_fields.sort_by(|(left, _), (right, _)| left.cmp(right)); + global_types.intern(TyData::Object(ObjectData { + fields: object_fields, + has_unknown: false, + })) + } + + fn module_type_with_foo(global_types: &Arc, foo_ty: Ty) -> GlobalTy { + let module_ty = closed_object_type(global_types, vec![("foo", foo_ty)]); + GlobalTy::new(module_ty).expect("module type should be global") + } + + #[rstest] + #[case( + "std.map(function(x) x, [])", + 0, 4, + vec![ + "`function(func: function(), arr: array)`", + indoc! 
{r" + ```jsonnet + std.map(func, arr) + ```"}, + "Applies `func` to each element of `arr`.", + "**Example:**", + indoc! {r" + ```jsonnet + std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6] + ```"}, + indoc! {r" + ```jsonnet + std.map + ```"}, + ] + )] + #[case( + "local s = std; s.map(function(x) x, [])", + 0, 17, + vec![ + "`function(func: function(), arr: array)`", + indoc! {r" + ```jsonnet + std.map(func, arr) + ```"}, + "Applies `func` to each element of `arr`.", + "**Example:**", + indoc! {r" + ```jsonnet + std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6] + ```"}, + indoc! {r" + ```jsonnet + s.map + ```"}, + ] + )] + #[case( + "std.filter(function(x) x > 0, [1, -1, 2])", + 0, 4, + vec![ + "`function(func: function(), arr: array)`", + indoc! {r" + ```jsonnet + std.filter(func, arr) + ```"}, + "Returns elements of `arr` where `func(x)` is true.", + "**Example:**", + indoc! {r" + ```jsonnet + std.filter(function(x) x > 1, [1,2,3]) // [2, 3] + ```"}, + indoc! {r" + ```jsonnet + std.filter + ```"}, + ] + )] + fn test_stdlib_hover( + #[case] code: &str, + #[case] line: u32, + #[case] char: u32, + #[case] expected: Vec<&str>, + ) { + let result = get_hover(code, line, char); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents(&contents, &expected); + }); + } + + #[test] + fn test_stdlib_hover_shadowed_std_returns_none() { + let code = "local std = { map(x): x }; std.map(1)"; + let global_types = Arc::new(GlobalTyStore::new()); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze_with_global(&doc, global_types); + + let map_offset = + u32::try_from(code.rfind("map").expect("test source contains map token")).unwrap(); + let pos = (0, map_offset).into(); + let offset = doc + .line_index() + .offset(pos, doc.text()) + .expect("offset should exist"); + let token = token_at_offset(doc.ast().syntax(), offset).expect("token should exist"); + assert_eq!(stdlib_hover_doc(&token), 
None); + + let hover_result = hover(&doc, pos, &analysis); + assert_matches!( + hover_result, + Some(Hover { + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`function(x: any)`", + indoc! {r" + ```jsonnet + std.map + ```"}, + ], + ); + } + ); + } + + #[rstest] + #[case( + "local foo = 1; foo", + 0, + 15, + vec![ + "`number`", + indoc! {r" + ```jsonnet + local foo = 1; + ```"}, + ] + )] + #[case( + "local add(a, b) = a + b; add(1, 2)", + 0, + 25, + vec![ + "`(a: any, b: any) -> number`", + indoc! {r" + ```jsonnet + local add(a, b) = a + b; + ```"}, + ] + )] + #[case( + "local arr = [1, 2, 3]; arr", + 0, + 6, + vec![ + "`[number, number, number]`", + indoc! {r" + ```jsonnet + local arr = [1, 2, 3]; + ```"}, + ] + )] + #[case( + "local obj = { a: 1 }; obj", + 0, + 6, + vec![ + "`{ a: number }`", + indoc! {r" + ```jsonnet + local obj = { a: 1 }; + ```"}, + ] + )] + fn test_local_hover( + #[case] code: &str, + #[case] line: u32, + #[case] char: u32, + #[case] expected: Vec<&str>, + ) { + let result = get_hover(code, line, char); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents(&contents, &expected); + }); + } + + #[test] + fn test_local_hover_multiline_preview_trims_trailing_blank_lines() { + let code = "{\n local x = {\n a: 1,\n b: 2,\n },\n\n z: x,\n}\n"; + let result = get_hover(code, 6, 5); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`{ a: number, b: number }`", + indoc! {r" + ```jsonnet + local x = { + a: 1, + b: 2, + } + ```"}, + ], + ); + }); + } + + #[test] + fn test_local_hover_file_level_preview_trims_trailing_blank_lines() { + let code = "local x = {\n a: 1,\n b: 2,\n};\n\n{\n z: x,\n}\n"; + let result = get_hover(code, 6, 5); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`{ a: number, b: number }`", + indoc! 
{r" + ```jsonnet + local x = { + a: 1, + b: 2, + }; + ```"}, + ], + ); + }); + } + + #[rstest] + #[case( + r#"import "lib/utils.libsonnet""#, + 0, + 10, + vec![ + "`any`", + "`lib/utils.libsonnet`", + indoc! {r#" + ```jsonnet + import "lib/utils.libsonnet" + ```"#}, + ] + )] + #[case( + r#"local lib = import "lib.libsonnet"; lib.foo"#, + 0, + 40, + vec![ + "`any`", + "`foo` from `lib.libsonnet`", + indoc! {r" + ```jsonnet + lib.foo + ```"}, + ] + )] + fn test_import_hover( + #[case] code: &str, + #[case] line: u32, + #[case] char: u32, + #[case] expected: Vec<&str>, + ) { + let result = get_hover(code, line, char); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents(&contents, &expected); + }); + } + + #[rstest] + #[case( + None, + vec![ + "`function(x)`", + "`foo` from `lib.libsonnet`", + indoc! {r" + ```jsonnet + lib.foo + ```"}, + ], + "function(x)" + )] + #[case( + Some("function"), + vec![ + "`(x: any) -> number`", + "`foo` from `lib.libsonnet`", + indoc! {r" + ```jsonnet + lib.foo + ```"}, + ], + "function(x)" + )] + #[case( + Some("object"), + vec![ + "`{ resolved: string }`", + "`foo` from `lib.libsonnet`", + indoc! 
{r" + ```jsonnet + lib.foo + ```"}, + ], + "{ resolved: string }" + )] + fn test_import_field_hover_resolution_exact_shape( + #[case] inferred_import_kind: Option<&str>, + #[case] expected: Vec<&str>, + #[case] resolved_type: &str, + ) { + let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; + let global_types = Arc::new(GlobalTyStore::new()); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = match inferred_import_kind { + None => TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global_types)), + Some("function") => { + let module_ty = + module_type_with_foo(&global_types, function_type_x_to_number(&global_types)); + let import_resolver = Arc::new(StaticImportResolver { + path: "lib.libsonnet", + ty: module_ty, + }); + TypeAnalysis::analyze_with_resolver( + &doc, + Arc::clone(&global_types), + import_resolver, + ) + } + Some("object") => { + let local_object_ty = + closed_object_type(&global_types, vec![("localOnly", Ty::NUMBER)]); + let module_ty = module_type_with_foo(&global_types, local_object_ty); + let import_resolver = Arc::new(StaticImportResolver { + path: "lib.libsonnet", + ty: module_ty, + }); + TypeAnalysis::analyze_with_resolver( + &doc, + Arc::clone(&global_types), + import_resolver, + ) + } + Some(other) => panic!("unsupported inferred_import_kind test case: {other}"), + }; + + let result = hover_with_import_field_type( + &doc, + (0, 40).into(), + &analysis, + Some(&|path, fields| { + assert_eq!(path, "lib.libsonnet"); + assert_eq!(fields, &["foo".to_string()]); + Some(resolved_type.to_string()) + }), + ); + + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents(&contents, &expected); + }); + } + + #[rstest] + #[case( + "local x = 42; x", + 0, + 14, + vec![ + "`number`", + indoc! {r" + ```jsonnet + local x = 42; + ```"}, + ] + )] + #[case( + "local obj = { a: 1, b: \"hello\" }; obj", + 0, + 35, + vec![ + "`{ a: number, b: string }`", + indoc! 
{r#" + ```jsonnet + local obj = { a: 1, b: "hello" }; + ```"#}, + ] + )] + #[case( + "42", + 0, + 0, + vec![ + "`number`", + indoc! {r" + ```jsonnet + 42 + ```"}, + ] + )] + #[case( + "\"hello\"", + 0, + 1, + vec![ + "`string`", + indoc! {r#" + ```jsonnet + "hello" + ```"#}, + ] + )] + #[case( + "[1, 2, 3]", + 0, + 1, + vec![ + "`number`", + indoc! {r" + ```jsonnet + 1 + ```"}, + ] + )] + fn test_type_inference_hover( + #[case] code: &str, + #[case] line: u32, + #[case] char: u32, + #[case] expected: Vec<&str>, + ) { + let result = get_hover(code, line, char); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents(&contents, &expected); + }); + } + + #[test] + fn test_keyword_hover_includes_token_docs() { + let result = get_hover("null", 0, 0); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`null`", + "Literal `null` value.", + indoc! {r" + ```jsonnet + null + ```"}, + ], + ); + }); + } + + #[test] + fn test_operator_hover_docs() { + let result = get_hover("1 + 2", 0, 2); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`number`", + "`+` adds numbers, concatenates strings/arrays, or merges objects.", + indoc! {r" + ```jsonnet + 1 + 2 + ```"}, + ], + ); + }); + } + + #[test] + fn test_multi_purpose_operator_hover_docs() { + let result = get_hover("\"hello %s\" % \"world\"", 0, 11); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`string`", + "This token has multiple purposes:", + indoc! {r" + **Purpose 1** + + `%` computes numeric remainder."}, + indoc! {r" + ```jsonnet + 5 % 2 + ```"}, + indoc! {r" + **Purpose 2** + + `%` formats strings with placeholders."}, + indoc! 
{r#" + ```jsonnet + "hello %s" % "world" + ```"#}, + ], + ); + }); + } + + #[test] + fn test_no_hover_on_whitespace() { + let result = get_hover("local x = 1; x", 0, 13); + assert_matches!(result, None); + } + + #[test] + fn test_no_hover_on_object_local_bind_equals() { + let code = "{ local x = { a: 1 }, z: x }"; + let result = get_hover(code, 0, 10); + assert_matches!(result, None); + } + + #[test] + fn test_no_hover_on_open_brace() { + let code = "{ z: { a: 1, b: 2 } }"; + let result = get_hover(code, 0, 0); + assert_matches!(result, None); + } + + #[test] + fn test_no_hover_on_close_brace() { + let code = "{ z: { a: 1, b: 2 } }"; + let close_brace = u32::try_from(code.rfind('}').expect("code should end with `}`")) + .expect("close brace offset should fit u32"); + let result = get_hover(code, 0, close_brace); + assert_matches!(result, None); + } + + #[test] + fn test_hover_on_function_sugar_param_returns_parameter_type() { + let code = "local f(x) = x + 1;\nf"; + let result = get_hover(code, 0, 8); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`any`", + indoc! {r" + ```jsonnet + local f(x) = x + 1; + ```"}, + ], + ); + }); + } + + #[test] + fn test_hover_on_function_sugar_param_reference_shows_preview() { + let code = "local f(x) = x + 1;\nf"; + let result = get_hover(code, 0, 13); + assert_matches!(result, Some(Hover { + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`any`", + indoc! 
{r" + ```jsonnet + local f(x) = x + 1; + ```"}, + ], + ); + }); + } + + #[test] + fn test_no_hover_on_function_sugar_equals() { + let code = "local f(x) = x + 1;\nf"; + let result = get_hover(code, 0, 11); + assert_matches!(result, None); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover/local.rs b/crates/jrsonnet-lsp-handlers/src/hover/local.rs new file mode 100644 index 00000000..6d5cb35f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/local.rs @@ -0,0 +1,273 @@ +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, ByteOffset, Document, LspPosition}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_types::{Ty, TyData}; +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, Destruct, ExprFunction, ForSpec, MemberBindStmt, MemberFieldMethod, Param, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use rowan::TextRange; + +use super::{preview_policy::render_preview, ImportFieldTypeResolver}; +use crate::definition::{goto_declaration, goto_definition, DefinitionResult}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum LocalHoverKind { + Local, + Import, + ImportField, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) struct LocalHoverSections { + pub kind: LocalHoverKind, + pub type_markdown: Option, + pub context_markdown: Option, + pub preview_code: Option, +} + +/// Check for hover on a local variable reference. +pub(super) fn check_local_hover( + document: &Document, + analysis: &TypeAnalysis, + position: LspPosition, + offset: ByteOffset, + import_field_type_resolver: Option<&ImportFieldTypeResolver<'_>>, +) -> Option { + let result = goto_definition(document, position) + .or_else(|| local_definition_at_offset(document, offset))?; + let declaration_range = goto_declaration(document, position).and_then(local_range_from_result); + + // Get the inferred type at this position. If the local definition site only + // reports `any`, fall back to the bound value expression type. 
+ let ast = document.ast(); + let inferred_ty = analysis.type_at_position(ast.syntax(), offset.into()); + let mut inferred_type = inferred_ty.map(|ty| analysis.display_for_hover(ty)); + let inferred_is_any = inferred_ty.is_none_or(|ty| ty == Ty::ANY); + let inferred_is_object = inferred_ty + .is_some_and(|ty| analysis.with_data(ty, |data| matches!(data, TyData::Object(_)))); + + match &result { + DefinitionResult::ImportField { path, fields } => { + if inferred_is_any || inferred_is_object { + if let Some(resolver) = import_field_type_resolver { + if let Some(resolved_type) = resolver(path, fields) { + inferred_type = Some(resolved_type); + } + } + } + } + DefinitionResult::Local(range) => { + if matches!(inferred_type.as_deref(), None | Some("any")) { + inferred_type = definition_value_type(document, analysis, range); + } + } + DefinitionResult::Import(path) => { + if inferred_is_any { + if let Some(resolver) = import_field_type_resolver { + if let Some(resolved_type) = resolver(path, &[]) { + inferred_type = Some(resolved_type); + } + } + } + } + } + + let type_markdown = inferred_type.map(|ty| format!("`{ty}`")); + + let (kind, context_markdown) = match &result { + DefinitionResult::Local(_) => (LocalHoverKind::Local, None), + DefinitionResult::Import(path) => (LocalHoverKind::Import, Some(format!("`{path}`"))), + DefinitionResult::ImportField { path, fields } => { + let field_chain = fields.join("."); + ( + LocalHoverKind::ImportField, + Some(format!("`{field_chain}` from `{path}`")), + ) + } + }; + let preview_code = preview_range_for_result(&result, declaration_range) + .and_then(|range| definition_preview(document, range)); + + Some(LocalHoverSections { + kind, + type_markdown, + context_markdown, + preview_code, + }) +} + +fn local_range_from_result(result: DefinitionResult) -> Option { + match result { + DefinitionResult::Local(range) => Some(range), + DefinitionResult::Import(_) | DefinitionResult::ImportField { .. 
} => None, + } +} + +fn preview_range_for_result( + result: &DefinitionResult, + declaration_range: Option, +) -> Option { + match result { + DefinitionResult::Local(range) => Some(*range), + DefinitionResult::Import(_) | DefinitionResult::ImportField { .. } => declaration_range, + } +} + +fn definition_preview(document: &Document, range: lsp_types::Range) -> Option { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let def_pos: LspPosition = (range.start.line, range.start.character).into(); + let def_offset = line_index.offset(def_pos, text)?; + let token = token_at_offset(ast.syntax(), def_offset)?; + let preview_node = preview_node_for_definition_token(&token)?; + + let preview_text = preview_node.text().to_string(); + render_preview(&preview_text) +} + +fn preview_node_for_definition_token(token: &SyntaxToken) -> Option { + let name_node = token.parent()?; + + name_node + .ancestors() + .find_map(MemberBindStmt::cast) + .map(|node| node.syntax().clone()) + .or_else(|| { + name_node + .ancestors() + .find_map(MemberFieldMethod::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node + .ancestors() + .find_map(ForSpec::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node + .ancestors() + .find_map(ExprFunction::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node + .ancestors() + .find_map(StmtLocal::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node + .ancestors() + .find_map(Bind::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node + .ancestors() + .find_map(Param::cast) + .map(|node| node.syntax().clone()) + }) +} + +fn local_definition_at_offset(document: &Document, offset: ByteOffset) -> Option { + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name_node = token.parent()?; + let name_range = 
name_node.text_range(); + + if let Some(bind) = name_node.ancestors().find_map(Bind::cast) { + if let Some(definition_range) = bind_definition_range(&bind) { + if name_range == definition_range { + return Some(DefinitionResult::Local(to_lsp_range( + definition_range, + document.line_index(), + document.text(), + ))); + } + } + } + + if let Some(param) = name_node.ancestors().find_map(Param::cast) { + if let Some(definition_range) = param_definition_range(¶m) { + if name_range == definition_range { + return Some(DefinitionResult::Local(to_lsp_range( + definition_range, + document.line_index(), + document.text(), + ))); + } + } + } + + if let Some(for_spec) = name_node.ancestors().find_map(ForSpec::cast) { + if let Some(definition_range) = for_spec_definition_range(&for_spec) { + if name_range == definition_range { + return Some(DefinitionResult::Local(to_lsp_range( + definition_range, + document.line_index(), + document.text(), + ))); + } + } + } + + None +} + +fn bind_definition_range(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind) => destruct_definition_range(bind.into()?), + Bind::BindFunction(bind) => Some(bind.name()?.syntax().text_range()), + } +} + +fn param_definition_range(param: &Param) -> Option { + destruct_definition_range(param.destruct()?) +} + +fn for_spec_definition_range(for_spec: &ForSpec) -> Option { + destruct_definition_range(for_spec.bind()?) 
+} + +fn destruct_definition_range(destruct: Destruct) -> Option { + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) +} + +fn definition_value_type( + document: &Document, + analysis: &TypeAnalysis, + range: &lsp_types::Range, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let def_pos: LspPosition = (range.start.line, range.start.character).into(); + let def_offset = line_index.offset(def_pos, text)?; + let token = token_at_offset(ast.syntax(), def_offset)?; + let name_node = token.parent()?; + + let bind = name_node.ancestors().find_map(Bind::cast)?; + if bind_definition_range(&bind) != Some(name_node.text_range()) { + return None; + } + + let value = match bind { + Bind::BindDestruct(bind) => bind.value()?, + Bind::BindFunction(bind) => bind.value()?, + }; + let ty = analysis.type_for_range(value.syntax().text_range())?; + Some(analysis.display_for_hover(ty)) +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover/mod.rs b/crates/jrsonnet-lsp-handlers/src/hover/mod.rs new file mode 100644 index 00000000..3834aa00 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/mod.rs @@ -0,0 +1,18 @@ +//! Hover handler for providing documentation on hover. +//! +//! Supports: +//! - Standard library functions (std.*) +//! - Local variable definitions (shows first few lines) + +mod handler; +mod local; +mod model; +mod preview_policy; +mod stdlib; +mod target; + +pub use handler::{hover, hover_with_import_field_type}; + +/// Maximum number of lines to show in hover for local definitions. 
+pub(super) const MAX_HOVER_LINES: usize = 5; +pub(super) type ImportFieldTypeResolver<'a> = dyn Fn(&str, &[String]) -> Option + 'a; diff --git a/crates/jrsonnet-lsp-handlers/src/hover/model.rs b/crates/jrsonnet-lsp-handlers/src/hover/model.rs new file mode 100644 index 00000000..767dc007 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/model.rs @@ -0,0 +1,293 @@ +use std::fmt; + +use lsp_types::{HoverContents, LanguageString, MarkedString}; + +use super::target::{HoverSectionSlot, HoverTargetPolicy}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) struct HoverCodeBlock { + pub(super) language: String, + pub(super) value: String, +} + +impl HoverCodeBlock { + #[must_use] + pub(super) fn jsonnet(value: String) -> Self { + Self { + language: "jsonnet".to_string(), + value, + } + } + + fn into_marked_string(self) -> Option { + let code = self.value.trim(); + (!code.is_empty()).then(|| { + MarkedString::LanguageString(LanguageString { + language: self.language, + value: code.to_string(), + }) + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) enum HoverDocMarkdown { + Raw(String), + StdlibExampleHeading, + TokenMultiPurposeIntro, + TokenPurpose { index: Option, doc: String }, +} + +impl HoverDocMarkdown { + #[must_use] + pub(super) fn to_markdown(&self) -> String { + match self { + Self::Raw(markdown) => markdown.clone(), + Self::StdlibExampleHeading => "**Example:**".to_string(), + Self::TokenMultiPurposeIntro => "This token has multiple purposes:".to_string(), + Self::TokenPurpose { index, doc } => index.map_or_else( + || doc.clone(), + |index| format!("**Purpose {index}**\n\n{doc}"), + ), + } + } +} + +impl fmt::Display for HoverDocMarkdown { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.to_markdown()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) struct HoverTokenPurpose { + pub(super) doc: String, + pub(super) example: String, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) 
enum HoverDocBlock { + Stdlib { + name: String, + signature: String, + description: String, + example: Option, + }, + TokenPurposes(Vec), +} + +impl HoverDocBlock { + #[must_use] + pub(super) fn into_sections(self) -> Vec { + match self { + Self::Stdlib { + name, + signature, + description, + example, + } => { + let mut sections = vec![ + HoverBlock::DocsCode(HoverCodeBlock::jsonnet(format!( + "std.{name}{signature})" + ))), + HoverBlock::DocsText(HoverDocMarkdown::Raw(description)), + ]; + if let Some(example) = example { + sections.push(HoverBlock::DocsText(HoverDocMarkdown::StdlibExampleHeading)); + sections.push(HoverBlock::DocsCode(HoverCodeBlock::jsonnet(example))); + } + sections + } + Self::TokenPurposes(purposes) => { + let multiple_purposes = purposes.len() > 1; + let mut sections = Vec::new(); + if multiple_purposes { + sections.push(HoverBlock::DocsText( + HoverDocMarkdown::TokenMultiPurposeIntro, + )); + } + for (idx, purpose) in purposes.into_iter().enumerate() { + sections.push(HoverBlock::DocsText(HoverDocMarkdown::TokenPurpose { + index: multiple_purposes.then_some(idx + 1), + doc: purpose.doc, + })); + sections.push(HoverBlock::DocsCode(HoverCodeBlock::jsonnet( + purpose.example, + ))); + } + sections + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) enum HoverBlock { + Type(String), + Context(String), + DocsText(HoverDocMarkdown), + DocsCode(HoverCodeBlock), + Preview(HoverCodeBlock), +} + +impl HoverBlock { + fn into_marked_string(self) -> Option { + match self { + Self::Type(markdown) | Self::Context(markdown) => { + let markdown = markdown.trim(); + (!markdown.is_empty()).then(|| MarkedString::String(markdown.to_string())) + } + Self::DocsText(markdown) => { + let markdown = markdown.to_markdown(); + let markdown = markdown.trim(); + (!markdown.is_empty()).then(|| MarkedString::String(markdown.to_string())) + } + Self::DocsCode(code) | Self::Preview(code) => code.into_marked_string(), + } + } +} + +#[derive(Debug, Clone, 
PartialEq, Eq, Default)] +pub(super) struct HoverFacts { + pub(super) type_markdown: Option, + pub(super) context_markdown: Option, + pub(super) docs: Vec, + pub(super) preview_code: Option, +} + +pub(super) fn compose_hover_contents_with_policy( + policy: HoverTargetPolicy, + facts: HoverFacts, +) -> Option { + let mut type_markdown = facts.type_markdown; + let mut context_markdown = facts.context_markdown; + let mut docs = facts.docs; + let mut preview_code = facts.preview_code; + + let mut blocks = Vec::new(); + let slots = [ + HoverSectionSlot::Type, + HoverSectionSlot::Context, + HoverSectionSlot::Docs, + HoverSectionSlot::Preview, + ]; + for slot in slots { + if !policy.allows(slot) { + continue; + } + match slot { + HoverSectionSlot::Type => { + if let Some(value) = type_markdown.take() { + blocks.push(HoverBlock::Type(value)); + } + } + HoverSectionSlot::Context => { + if let Some(value) = context_markdown.take() { + blocks.push(HoverBlock::Context(value)); + } + } + HoverSectionSlot::Docs => { + for doc in std::mem::take(&mut docs) { + blocks.extend(doc.into_sections()); + } + } + HoverSectionSlot::Preview => { + if let Some(value) = preview_code.take() { + blocks.push(HoverBlock::Preview(HoverCodeBlock::jsonnet(value))); + } + } + } + } + + render_hover_blocks(blocks) +} + +fn render_hover_blocks(blocks: Vec) -> Option { + let mut unique_sections = Vec::new(); + for section in blocks + .into_iter() + .filter_map(HoverBlock::into_marked_string) + { + if unique_sections.iter().all(|existing| existing != §ion) { + unique_sections.push(section); + } + } + (!unique_sections.is_empty()).then_some(HoverContents::Array(unique_sections)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::hover::target::HoverTargetKind; + + #[test] + fn test_compose_hover_contents_uses_fixed_slot_order() { + let contents = compose_hover_contents_with_policy( + HoverTargetPolicy::for_kind(HoverTargetKind::StdlibSymbol), + HoverFacts { + type_markdown: 
Some("`number`".to_string()), + context_markdown: Some("`ignored`".to_string()), + docs: vec![HoverDocBlock::TokenPurposes(vec![HoverTokenPurpose { + doc: "Literal `null` value.".to_string(), + example: "null".to_string(), + }])], + preview_code: Some("x".to_string()), + }, + ) + .expect("expected hover contents"); + + let HoverContents::Array(sections) = contents else { + panic!("expected hover array contents"); + }; + assert_eq!(sections.len(), 4); + assert_eq!(sections[0], MarkedString::String("`number`".to_string())); + assert_eq!( + sections[1], + MarkedString::String("Literal `null` value.".to_string()) + ); + assert_eq!( + sections[2], + MarkedString::LanguageString(LanguageString { + language: "jsonnet".to_string(), + value: "null".to_string(), + }) + ); + assert_eq!( + sections[3], + MarkedString::LanguageString(LanguageString { + language: "jsonnet".to_string(), + value: "x".to_string(), + }) + ); + } + + #[test] + fn test_compose_hover_contents_respects_policy_caps() { + let contents = compose_hover_contents_with_policy( + HoverTargetPolicy::for_kind(HoverTargetKind::LocalRef), + HoverFacts { + type_markdown: Some("`number`".to_string()), + context_markdown: Some("`context`".to_string()), + docs: vec![HoverDocBlock::TokenPurposes(vec![HoverTokenPurpose { + doc: "doc".to_string(), + example: "expr".to_string(), + }])], + preview_code: Some("local x = 1;".to_string()), + }, + ) + .expect("expected hover contents"); + + let HoverContents::Array(sections) = contents else { + panic!("expected hover array contents"); + }; + assert_eq!(sections.len(), 2); + assert_eq!(sections[0], MarkedString::String("`number`".to_string())); + assert_eq!( + sections[1], + MarkedString::LanguageString(LanguageString { + language: "jsonnet".to_string(), + value: "local x = 1;".to_string(), + }) + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover/preview_policy.rs b/crates/jrsonnet-lsp-handlers/src/hover/preview_policy.rs new file mode 100644 index 00000000..d9d2fb67 
--- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/preview_policy.rs @@ -0,0 +1,157 @@ +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; + +use super::MAX_HOVER_LINES; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum HoverPreviewCandidateKind { + DefinitionSite, + MemberBindingOrFunctionSugar, + Expression, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum HoverPreviewNodeClass { + MemberBindingOrFunctionSugar, + Expression, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct HoverPreviewSyntaxKind(SyntaxKind); + +impl HoverPreviewSyntaxKind { + fn classify(self) -> Option { + let kind = self.0; + if matches!( + kind, + SyntaxKind::MEMBER_BIND_STMT + | SyntaxKind::MEMBER_FIELD_METHOD + | SyntaxKind::MEMBER_FIELD_NORMAL + | SyntaxKind::MEMBER_ASSERT_STMT + | SyntaxKind::FOR_SPEC + | SyntaxKind::STMT_LOCAL + | SyntaxKind::BIND_DESTRUCT + | SyntaxKind::BIND_FUNCTION + | SyntaxKind::PARAM + | SyntaxKind::EXPR_FUNCTION + ) { + return Some(HoverPreviewNodeClass::MemberBindingOrFunctionSugar); + } + if matches!( + kind, + SyntaxKind::EXPR_IF_THEN_ELSE + | SyntaxKind::EXPR_ARRAY_COMP + | SyntaxKind::EXPR_OBJECT + | SyntaxKind::EXPR_ARRAY + | SyntaxKind::EXPR_CALL + | SyntaxKind::EXPR_FIELD + | SyntaxKind::EXPR_INDEX + | SyntaxKind::EXPR_SLICE + | SyntaxKind::EXPR_BINARY + | SyntaxKind::EXPR_UNARY + | SyntaxKind::EXPR_OBJ_EXTEND + | SyntaxKind::EXPR_IMPORT + | SyntaxKind::EXPR_PARENED + | SyntaxKind::EXPR_LITERAL + | SyntaxKind::EXPR_STRING + | SyntaxKind::EXPR_NUMBER + ) { + return Some(HoverPreviewNodeClass::Expression); + } + None + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct HoverPreviewCandidate { + kind: HoverPreviewCandidateKind, + depth: usize, + preview_code: String, +} + +impl HoverPreviewCandidate { + fn rank_key(&self) -> (u8, usize, u8, usize) { + let (source_rank, depth_rank, kind_tiebreak) = match self.kind { + HoverPreviewCandidateKind::DefinitionSite => (0, 0, 0), + 
HoverPreviewCandidateKind::MemberBindingOrFunctionSugar => (1, self.depth, 0), + HoverPreviewCandidateKind::Expression => (1, self.depth, 1), + }; + // For equally ranked candidates, prefer richer snippets. + let richness_rank = usize::MAX.saturating_sub(self.preview_code.len()); + (source_rank, depth_rank, kind_tiebreak, richness_rank) + } +} + +pub(super) fn select_preview_code( + token: &SyntaxToken, + definition_preview: Option, +) -> Option { + collect_preview_candidates(token, definition_preview) + .into_iter() + .min_by_key(HoverPreviewCandidate::rank_key) + .map(|candidate| candidate.preview_code) +} + +pub(super) fn render_preview(preview_text: &str) -> Option { + let mut preview_lines: Vec<&str> = preview_text.lines().collect(); + if preview_lines.is_empty() { + return None; + } + + let truncated = preview_lines.len() > MAX_HOVER_LINES; + if truncated { + preview_lines.truncate(MAX_HOVER_LINES); + } + + let mut preview = preview_lines.join("\n"); + if truncated { + preview.push_str("\n..."); + } + Some(preview) +} + +fn collect_preview_candidates( + token: &SyntaxToken, + definition_preview: Option, +) -> Vec { + let mut candidates = Vec::new(); + + if let Some(preview_code) = definition_preview { + let preview_code = preview_code.trim().to_string(); + if !preview_code.is_empty() { + candidates.push(HoverPreviewCandidate { + kind: HoverPreviewCandidateKind::DefinitionSite, + depth: 0, + preview_code, + }); + } + } + + let Some(token_node) = token.parent() else { + return candidates; + }; + for (depth, node) in token_node.ancestors().enumerate() { + let Some(kind) = preview_candidate_kind_for_node(node.kind()) else { + continue; + }; + let node_text = node.text().to_string(); + let Some(preview_code) = render_preview(&node_text) else { + continue; + }; + candidates.push(HoverPreviewCandidate { + kind, + depth, + preview_code, + }); + } + + candidates +} + +fn preview_candidate_kind_for_node(kind: SyntaxKind) -> Option { + match 
HoverPreviewSyntaxKind(kind).classify()? { + HoverPreviewNodeClass::MemberBindingOrFunctionSugar => { + Some(HoverPreviewCandidateKind::MemberBindingOrFunctionSugar) + } + HoverPreviewNodeClass::Expression => Some(HoverPreviewCandidateKind::Expression), + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs b/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs new file mode 100644 index 00000000..bf11c8a0 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs @@ -0,0 +1,29 @@ +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; +use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_rowan_parser::{nodes::ExprField, AstNode, SyntaxKind, SyntaxToken}; + +/// Check if the token is a stdlib function call and return structured docs. +pub(super) fn stdlib_hover_doc(token: &SyntaxToken) -> Option<&'static stdlib::StdlibDoc> { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text(); + + // Walk up: IDENT -> NAME -> EXPR_FIELD + let parent = token.parent()?; + if parent.kind() != SyntaxKind::NAME { + return None; + } + + let field = ExprField::cast(parent.parent()?)?; + + // Check if base resolves to builtin std. 
+ let base = field.base()?; + if !expr_resolves_to_builtin_std(&base) { + return None; + } + + stdlib::ensure_initialized(); + stdlib::get_stdlib_doc(name) +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover/target.rs b/crates/jrsonnet-lsp-handlers/src/hover/target.rs new file mode 100644 index 00000000..6e23618e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/target.rs @@ -0,0 +1,139 @@ +use super::local::LocalHoverKind; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum HoverTargetKind { + ImportFieldRef, + ImportRef, + LocalRef, + StdlibSymbol, + TokenDoc, + Expression, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) struct HoverTarget { + pub(super) kind: HoverTargetKind, +} + +impl HoverTarget { + #[must_use] + pub(super) fn classify( + local_kind: Option, + has_stdlib_docs: bool, + has_token_docs: bool, + ) -> Self { + let kind = if matches!(local_kind, Some(LocalHoverKind::ImportField)) { + HoverTargetKind::ImportFieldRef + } else if matches!(local_kind, Some(LocalHoverKind::Import)) { + HoverTargetKind::ImportRef + } else if matches!(local_kind, Some(LocalHoverKind::Local)) { + HoverTargetKind::LocalRef + } else if has_stdlib_docs { + HoverTargetKind::StdlibSymbol + } else if has_token_docs { + HoverTargetKind::TokenDoc + } else { + HoverTargetKind::Expression + }; + Self { kind } + } + + #[must_use] + pub(super) fn policy(self) -> HoverTargetPolicy { + HoverTargetPolicy::for_kind(self.kind) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum HoverSectionSlot { + Type, + Context, + Docs, + Preview, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) struct HoverTargetPolicy { + allowed_slots: &'static [HoverSectionSlot], +} + +impl HoverTargetPolicy { + #[must_use] + pub(super) fn for_kind(kind: HoverTargetKind) -> Self { + const IMPORT_SLOTS: &[HoverSectionSlot] = &[ + HoverSectionSlot::Type, + HoverSectionSlot::Context, + HoverSectionSlot::Preview, + ]; + const LOCAL_OR_EXPR_SLOTS: 
&[HoverSectionSlot] = + &[HoverSectionSlot::Type, HoverSectionSlot::Preview]; + const DOCS_SLOTS: &[HoverSectionSlot] = &[ + HoverSectionSlot::Type, + HoverSectionSlot::Docs, + HoverSectionSlot::Preview, + ]; + + match kind { + HoverTargetKind::ImportFieldRef | HoverTargetKind::ImportRef => Self { + allowed_slots: IMPORT_SLOTS, + }, + HoverTargetKind::LocalRef | HoverTargetKind::Expression => Self { + allowed_slots: LOCAL_OR_EXPR_SLOTS, + }, + HoverTargetKind::StdlibSymbol | HoverTargetKind::TokenDoc => Self { + allowed_slots: DOCS_SLOTS, + }, + } + } + + #[must_use] + pub(super) fn allows(self, slot: HoverSectionSlot) -> bool { + self.allowed_slots.contains(&slot) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_hover_target_priority_prefers_local_refs_over_docs() { + let target = HoverTarget::classify(Some(LocalHoverKind::ImportField), true, true); + assert_eq!(target.kind, HoverTargetKind::ImportFieldRef); + + let target = HoverTarget::classify(Some(LocalHoverKind::Import), true, true); + assert_eq!(target.kind, HoverTargetKind::ImportRef); + + let target = HoverTarget::classify(Some(LocalHoverKind::Local), true, true); + assert_eq!(target.kind, HoverTargetKind::LocalRef); + } + + #[test] + fn test_hover_target_priority_prefers_stdlib_over_token_docs() { + let target = HoverTarget::classify(None, true, true); + assert_eq!(target.kind, HoverTargetKind::StdlibSymbol); + } + + #[test] + fn test_hover_target_defaults_to_expression() { + let target = HoverTarget::classify(None, false, false); + assert_eq!(target.kind, HoverTargetKind::Expression); + } + + #[test] + fn test_hover_target_policy_matrix() { + let import_field = HoverTargetPolicy::for_kind(HoverTargetKind::ImportFieldRef); + assert!(import_field.allows(HoverSectionSlot::Type)); + assert!(import_field.allows(HoverSectionSlot::Context)); + assert!(import_field.allows(HoverSectionSlot::Preview)); + assert!(!import_field.allows(HoverSectionSlot::Docs)); + + let token_doc = 
HoverTargetPolicy::for_kind(HoverTargetKind::TokenDoc); + assert!(token_doc.allows(HoverSectionSlot::Docs)); + assert!(!token_doc.allows(HoverSectionSlot::Context)); + + let expression = HoverTargetPolicy::for_kind(HoverTargetKind::Expression); + assert!(!expression.allows(HoverSectionSlot::Docs)); + assert!(expression.allows(HoverSectionSlot::Preview)); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs new file mode 100644 index 00000000..791297e0 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -0,0 +1,1642 @@ +//! Inlay hint handler. +//! +//! Provides type hints for local bindings and local function return values. + +use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindDestruct, BindFunction, Destruct, ExprCall, ExprFunction, ForSpec, Member, + MemberFieldMethod, MemberFieldNormal, ParamsDesc, StmtLocal, + }, + AstNode, +}; +use lsp_types::{InlayHint, InlayHintKind, InlayHintLabel, Position, Range}; +use rowan::TextRange; +use serde::{Deserialize, Serialize}; +use strum::{Display, EnumString}; + +/// Category selection for local-binding inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum LocalHintsMode { + Off, + Variables, + Functions, + #[default] + All, +} + +impl LocalHintsMode { + const fn variable_hints_enabled(self) -> bool { + matches!(self, Self::Variables | Self::All) + } + + const fn function_hints_enabled(self) -> bool { + matches!(self, Self::Functions | Self::All) + } +} + +/// Category selection for object member inlay hints. 
+#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum ObjectMemberHintsMode { + #[default] + Off, + Fields, + Methods, + All, +} + +impl ObjectMemberHintsMode { + const fn field_hints_enabled(self) -> bool { + matches!(self, Self::Fields | Self::All) + } + + const fn method_hints_enabled(self) -> bool { + matches!(self, Self::Methods | Self::All) + } +} + +/// Category selection for function parameter inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum FunctionParameterHintsMode { + #[default] + Off, + All, +} + +impl FunctionParameterHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + +/// Category selection for anonymous function return inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum AnonymousFunctionReturnHintsMode { + #[default] + Off, + All, +} + +impl AnonymousFunctionReturnHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + +/// Category selection for call-argument inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum CallArgumentHintsMode { + #[default] + Off, + All, +} + +impl CallArgumentHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + +/// Category selection for comprehension binding inlay hints. 
+#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum ComprehensionHintsMode { + #[default] + Off, + All, +} + +impl ComprehensionHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + +/// Category selection for destructuring binding inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum DestructuringHintsMode { + #[default] + Off, + All, +} + +impl DestructuringHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + +/// Configuration for inlay hint generation. +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(default, rename_all = "camelCase")] +pub struct InlayHintsConfig { + /// Category filter for top-level `local` bindings. + pub local: LocalHintsMode, + /// Category filter for object-local `local` bindings. + pub object_local: LocalHintsMode, + /// Category filter for object fields and methods. + pub object_members: ObjectMemberHintsMode, + /// Category filter for function parameter type hints. + pub function_parameters: FunctionParameterHintsMode, + /// Category filter for anonymous function return type hints. + pub anonymous_function_returns: AnonymousFunctionReturnHintsMode, + /// Category filter for call-argument parameter-name hints. + pub call_arguments: CallArgumentHintsMode, + /// Category filter for comprehension variable hints. + pub comprehensions: ComprehensionHintsMode, + /// Category filter for destructuring variable hints. 
+ pub destructuring: DestructuringHintsMode, +} + +impl Default for InlayHintsConfig { + fn default() -> Self { + Self { + local: LocalHintsMode::All, + object_local: LocalHintsMode::All, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, + } + } +} + +fn type_hint(position: Position, label: String) -> InlayHint { + InlayHint { + position, + label: InlayHintLabel::String(label), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + } +} + +fn parameter_hint(position: Position, label: String) -> InlayHint { + InlayHint { + position, + label: InlayHintLabel::String(label), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: Some(true), + data: None, + } +} + +fn position_in_range(pos: Position, range: Range) -> bool { + (range.start.line, range.start.character) <= (pos.line, pos.character) + && (pos.line, pos.character) <= (range.end.line, range.end.character) +} + +fn is_uninformative_type(type_str: &str) -> bool { + type_str == "any" || type_str == "function" +} + +/// Compute inlay hints for a visible range in a document. +pub fn inlay_hints( + document: &Document, + analysis: &TypeAnalysis, + visible_range: Range, +) -> Vec { + inlay_hints_with_config( + document, + analysis, + visible_range, + &InlayHintsConfig::default(), + ) +} + +/// Compute inlay hints for a visible range in a document with feature flags. 
+pub fn inlay_hints_with_config( + document: &Document, + analysis: &TypeAnalysis, + visible_range: Range, + config: &InlayHintsConfig, +) -> Vec { + let ast = document.ast(); + let text = document.text(); + let line_index = document.line_index(); + let mut hints = Vec::new(); + + for stmt_local in ast.syntax().descendants().filter_map(StmtLocal::cast) { + for bind in stmt_local.binds() { + match bind { + Bind::BindDestruct(bind_destruct) => { + if config.local.variable_hints_enabled() { + push_binding_type_hint( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } + if config.destructuring.enabled() { + push_destructuring_binding_hints( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } + } + Bind::BindFunction(bind_function) if config.local.function_hints_enabled() => { + push_function_return_hint( + &mut hints, + &bind_function, + analysis, + visible_range, + line_index, + text, + ); + } + Bind::BindFunction(_) => {} + } + } + } + + for member_list in ast + .syntax() + .descendants() + .filter_map(jrsonnet_rowan_parser::nodes::ObjBodyMemberList::cast) + { + for member in member_list.members() { + match member { + Member::MemberBindStmt(bind_stmt) => { + let Some(bind) = bind_stmt.obj_local().and_then(|obj_local| obj_local.bind()) + else { + continue; + }; + match bind { + Bind::BindDestruct(bind_destruct) => { + if config.object_local.variable_hints_enabled() { + push_binding_type_hint( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } + if config.destructuring.enabled() { + push_destructuring_binding_hints( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } + } + Bind::BindFunction(bind_function) + if config.object_local.function_hints_enabled() => + { + push_function_return_hint( + &mut hints, + &bind_function, + analysis, + visible_range, + line_index, + text, + ); + } + Bind::BindFunction(_) => {} 
+ } + } + Member::MemberFieldNormal(field) if config.object_members.field_hints_enabled() => { + push_field_type_hint( + &mut hints, + &field, + analysis, + visible_range, + line_index, + text, + ); + } + Member::MemberFieldMethod(method) + if config.object_members.method_hints_enabled() => + { + push_method_return_hint( + &mut hints, + &method, + analysis, + visible_range, + line_index, + text, + ); + } + _ => {} + } + } + } + + if config.function_parameters.enabled() { + for bind_function in ast.syntax().descendants().filter_map(BindFunction::cast) { + push_bind_function_parameter_hints( + &mut hints, + &bind_function, + analysis, + visible_range, + line_index, + text, + ); + } + + for expr_function in ast.syntax().descendants().filter_map(ExprFunction::cast) { + push_expr_function_parameter_hints( + &mut hints, + &expr_function, + analysis, + visible_range, + line_index, + text, + ); + } + } + + if config.anonymous_function_returns.enabled() { + for expr_function in ast.syntax().descendants().filter_map(ExprFunction::cast) { + push_anonymous_function_return_hint( + &mut hints, + &expr_function, + analysis, + visible_range, + line_index, + text, + ); + } + } + + if config.call_arguments.enabled() { + for call in ast.syntax().descendants().filter_map(ExprCall::cast) { + push_call_argument_hints(&mut hints, &call, analysis, visible_range, line_index, text); + } + } + + if config.comprehensions.enabled() { + for for_spec in ast.syntax().descendants().filter_map(ForSpec::cast) { + push_comprehension_binding_hint( + &mut hints, + &for_spec, + config.destructuring.enabled(), + analysis, + visible_range, + line_index, + text, + ); + } + } + + hints +} + +fn push_binding_type_hint( + hints: &mut Vec, + bind_destruct: &BindDestruct, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(value) = bind_destruct.value() else { + return; + }; + let Some(destruct) = BindDestruct::into(bind_destruct) else { + return; + }; + 
let Destruct::DestructFull(full) = destruct else { + return; + }; + let Some(name_node) = full.name() else { + return; + }; + let Some(ident) = name_node.ident_lit() else { + return; + }; + let Some(ty) = analysis.type_for_range(value.syntax().text_range()) else { + return; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + + let name_range = to_lsp_range(ident.text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(": {type_str}"))); + } +} + +fn push_function_return_hint( + hints: &mut Vec, + bind_function: &BindFunction, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(name_node) = bind_function.name() else { + return; + }; + let Some(body) = bind_function.value() else { + return; + }; + let Some(ty) = analysis.type_for_range(body.syntax().text_range()) else { + return; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + + let name_range = to_lsp_range(name_node.syntax().text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(" -> {type_str}"))); + } +} + +fn push_field_type_hint( + hints: &mut Vec, + field: &MemberFieldNormal, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(field_name) = field.field_name() else { + return; + }; + let Some(value) = field.expr() else { + return; + }; + let Some(ty) = analysis.type_for_range(value.syntax().text_range()) else { + return; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + + let name_range = to_lsp_range(field_name.syntax().text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(": {type_str}"))); + } +} + +fn 
push_method_return_hint( + hints: &mut Vec, + method: &MemberFieldMethod, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(field_name) = method.field_name() else { + return; + }; + let Some(body) = method.expr() else { + return; + }; + let Some(ty) = analysis.type_for_range(body.syntax().text_range()) else { + return; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + + let name_range = to_lsp_range(field_name.syntax().text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(" -> {type_str}"))); + } +} + +fn push_bind_function_parameter_hints( + hints: &mut Vec, + bind_function: &BindFunction, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(params_desc) = bind_function.params() else { + return; + }; + let Some(param_types) = function_param_types(analysis, bind_function.syntax().text_range()) + else { + return; + }; + + push_function_parameter_hints( + hints, + ¶ms_desc, + param_types, + analysis, + visible_range, + line_index, + text, + ); +} + +fn push_expr_function_parameter_hints( + hints: &mut Vec, + expr_function: &ExprFunction, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(params_desc) = expr_function.params_desc() else { + return; + }; + let Some(param_types) = function_param_types(analysis, expr_function.syntax().text_range()) + else { + return; + }; + + push_function_parameter_hints( + hints, + ¶ms_desc, + param_types, + analysis, + visible_range, + line_index, + text, + ); +} + +fn function_param_types( + analysis: &TypeAnalysis, + range: TextRange, +) -> Option> { + let ty = analysis.type_for_range(range)?; + let function_data = analysis.get_function(ty)?; + Some( + function_data + .params + .into_iter() + .map(|param| param.ty) + .collect(), + ) +} + 
+fn push_function_parameter_hints( + hints: &mut Vec, + params_desc: &ParamsDesc, + param_types: Vec, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + for (param, param_ty) in params_desc.params().zip(param_types.into_iter()) { + let Some(destruct) = param.destruct() else { + continue; + }; + + let type_str = analysis.display(param_ty); + if is_uninformative_type(&type_str) { + continue; + } + + let destruct_range = to_lsp_range(destruct.syntax().text_range(), line_index, text); + if position_in_range(destruct_range.end, visible_range) { + hints.push(type_hint(destruct_range.end, format!(": {type_str}"))); + } + } +} + +fn push_anonymous_function_return_hint( + hints: &mut Vec, + expr_function: &ExprFunction, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(body) = expr_function.expr() else { + return; + }; + let Some(return_ty) = analysis.type_for_range(body.syntax().text_range()) else { + return; + }; + let type_str = analysis.display(return_ty); + if is_uninformative_type(&type_str) { + return; + } + + let hint_pos = expr_function + .params_desc() + .map(|params| to_lsp_range(params.syntax().text_range(), line_index, text).end) + .or_else(|| { + expr_function + .expr() + .map(|expr| to_lsp_range(expr.syntax().text_range(), line_index, text).start) + }); + let Some(hint_pos) = hint_pos else { + return; + }; + + if position_in_range(hint_pos, visible_range) { + hints.push(type_hint(hint_pos, format!(" -> {type_str}"))); + } +} + +fn push_call_argument_hints( + hints: &mut Vec, + call: &ExprCall, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(callee) = call.callee() else { + return; + }; + let Some(callee_ty) = analysis.type_for_range(callee.syntax().text_range()) else { + return; + }; + let Some(function_data) = analysis.get_function(callee_ty) else { + return; + }; + let Some(args_desc) = 
call.args_desc() else { + return; + }; + + for (index, arg) in args_desc.args().enumerate() { + if arg.name().is_some() { + continue; + } + let Some(expr) = arg.expr() else { + continue; + }; + + let param_name = function_data + .params + .get(index) + .map(|param| param.name.as_str()) + .or_else(|| { + if function_data.variadic { + function_data.params.last().map(|param| param.name.as_str()) + } else { + None + } + }); + let Some(param_name) = param_name else { + continue; + }; + + let hint_pos = to_lsp_range(expr.syntax().text_range(), line_index, text).start; + if position_in_range(hint_pos, visible_range) { + hints.push(parameter_hint(hint_pos, format!("{param_name}:"))); + } + } +} + +fn push_comprehension_binding_hint( + hints: &mut Vec, + for_spec: &ForSpec, + include_destructuring: bool, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(iter_expr) = for_spec.expr() else { + return; + }; + let Some(iter_ty) = analysis.type_for_range(iter_expr.syntax().text_range()) else { + return; + }; + let Some(elem_ty) = comprehension_element_type(analysis, iter_ty) else { + return; + }; + let type_str = analysis.display(elem_ty); + if is_uninformative_type(&type_str) { + return; + } + + let Some(destruct) = for_spec.bind() else { + return; + }; + match destruct { + Destruct::DestructFull(full) => { + let Some(name) = full.name() else { + return; + }; + let range = to_lsp_range(name.syntax().text_range(), line_index, text); + if position_in_range(range.end, visible_range) { + hints.push(type_hint(range.end, format!(": {type_str}"))); + } + } + _ if include_destructuring => { + push_destruct_hints( + hints, + &destruct, + Some(elem_ty), + analysis, + visible_range, + line_index, + text, + ); + } + _ => {} + } +} + +fn comprehension_element_type( + analysis: &TypeAnalysis, + iter_ty: jrsonnet_lsp_types::Ty, +) -> Option { + use jrsonnet_lsp_types::TyData; + + match analysis.get_data(iter_ty) { + TyData::Array { elem, 
.. } => Some(elem), + TyData::Tuple { elems } => { + if elems.is_empty() { + None + } else { + Some(analysis.union(elems)) + } + } + TyData::Union(variants) => { + let mut elem_types = Vec::new(); + for variant in variants { + match analysis.get_data(variant) { + TyData::Array { elem, .. } => elem_types.push(elem), + TyData::Tuple { elems } => { + if !elems.is_empty() { + elem_types.push(analysis.union(elems)); + } + } + _ => return None, + } + } + if elem_types.is_empty() { + None + } else { + Some(analysis.union(elem_types)) + } + } + _ => None, + } +} + +fn push_destructuring_binding_hints( + hints: &mut Vec, + bind_destruct: &BindDestruct, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(value) = bind_destruct.value() else { + return; + }; + let Some(destruct) = BindDestruct::into(bind_destruct) else { + return; + }; + if matches!(destruct, Destruct::DestructFull(_)) { + return; + } + let value_ty = analysis.type_for_range(value.syntax().text_range()); + + push_destruct_hints( + hints, + &destruct, + value_ty, + analysis, + visible_range, + line_index, + text, + ); +} + +fn push_destruct_hints( + hints: &mut Vec, + destruct: &Destruct, + source_ty: Option, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + match destruct { + Destruct::DestructFull(full) => { + let Some(name) = full.name() else { + return; + }; + let ty = source_ty.or_else(|| analysis.type_for_range(name.syntax().text_range())); + let Some(ty) = ty else { + return; + }; + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + let range = to_lsp_range(name.syntax().text_range(), line_index, text); + if position_in_range(range.end, visible_range) { + hints.push(type_hint(range.end, format!(": {type_str}"))); + } + } + Destruct::DestructSkip(_) => {} + Destruct::DestructArray(array) => { + let mut elem_index = 0usize; + for part in 
array.destruct_array_parts() { + let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) = + part + else { + continue; + }; + let Some(inner) = elem.destruct() else { + elem_index += 1; + continue; + }; + let elem_ty = + source_ty.and_then(|ty| array_destruct_elem_type(analysis, ty, elem_index)); + push_destruct_hints( + hints, + &inner, + elem_ty, + analysis, + visible_range, + line_index, + text, + ); + elem_index += 1; + } + } + Destruct::DestructObject(object) => { + for field in object.destruct_object_fields() { + let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let field_ty = source_ty + .and_then(|ty| object_destruct_field_type(analysis, ty, field_name.text())); + + let Some(inner) = field.destruct() else { + let type_str = + field_ty.map_or_else(|| "any".to_string(), |ty| analysis.display(ty)); + if is_uninformative_type(&type_str) { + continue; + } + let range = to_lsp_range(field_name.text_range(), line_index, text); + if position_in_range(range.end, visible_range) { + hints.push(type_hint(range.end, format!(": {type_str}"))); + } + continue; + }; + push_destruct_hints( + hints, + &inner, + field_ty, + analysis, + visible_range, + line_index, + text, + ); + } + } + } +} + +fn array_destruct_elem_type( + analysis: &TypeAnalysis, + source_ty: jrsonnet_lsp_types::Ty, + index: usize, +) -> Option { + use jrsonnet_lsp_types::TyData; + + match analysis.get_data(source_ty) { + TyData::Array { elem, .. 
} => Some(elem), + TyData::Tuple { elems } => elems + .get(index) + .copied() + .or_else(|| (!elems.is_empty()).then(|| analysis.union(elems))), + TyData::Union(variants) => { + let mut out = Vec::new(); + for variant in variants { + let elem = array_destruct_elem_type(analysis, variant, index)?; + out.push(elem); + } + if out.is_empty() { + None + } else { + Some(analysis.union(out)) + } + } + TyData::Any => Some(jrsonnet_lsp_types::Ty::ANY), + _ => None, + } +} + +fn object_destruct_field_type( + analysis: &TypeAnalysis, + source_ty: jrsonnet_lsp_types::Ty, + field_name: &str, +) -> Option { + use jrsonnet_lsp_types::TyData; + + match analysis.get_data(source_ty) { + TyData::Object(object) => object.get_field(field_name).map(|field| field.ty), + TyData::AttrsOf { value } => Some(value), + TyData::Union(variants) => { + let mut out = Vec::new(); + for variant in variants { + let field_ty = object_destruct_field_type(analysis, variant, field_name)?; + out.push(field_ty); + } + if out.is_empty() { + None + } else { + Some(analysis.union(out)) + } + } + TyData::Any => Some(jrsonnet_lsp_types::Ty::ANY), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_types::GlobalTyStore; + use lsp_types::{InlayHint, InlayHintKind, InlayHintLabel, Position, Range}; + + use super::*; + + fn full_line_range() -> Range { + Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 200, + }, + } + } + + fn test_analysis(doc: &Document) -> TypeAnalysis { + let global_types = Arc::new(GlobalTyStore::new()); + TypeAnalysis::analyze_with_global(doc, global_types) + } + + fn assert_hints_eq(actual: &[InlayHint], expected: Vec) { + let actual_json = serde_json::to_value(actual).expect("actual hints should serialize"); + let expected_json = + serde_json::to_value(expected).expect("expected hints should serialize"); + assert_eq!(actual_json, expected_json); + 
} + + #[test] + fn test_local_binding_type_hint() { + let doc = Document::new("local x = 1; x".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let hints = inlay_hints(&doc, &analysis, full_line_range()); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 7, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_local_function_return_hint() { + let doc = Document::new("local f() = 42; f()".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let hints = inlay_hints(&doc, &analysis, full_line_range()); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 7, + }, + label: InlayHintLabel::String(" -> number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_object_local_binding_type_hint_default_enabled() { + let doc = Document::new("{ local x = 1, z: x }".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let hints = inlay_hints(&doc, &analysis, full_line_range()); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 9, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_object_field_hint_only_when_enabled() { + let doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Fields, + 
..InlayHintsConfig::default() + }; + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 3, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_object_method_hint_only_when_enabled() { + let doc = Document::new("{ one(): 1 }".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Methods, + ..InlayHintsConfig::default() + }; + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 5, + }, + label: InlayHintLabel::String(" -> number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_config_can_disable_object_local_binding_hints() { + let doc = Document::new("{ local x = 1, z: x }".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + object_local: LocalHintsMode::Functions, + ..InlayHintsConfig::default() + }; + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq(&hints, vec![]); + } + + #[test] + fn test_inlay_hint_respects_visible_range() { + let doc = Document::new( + "local x = 1; local y = 2; x + y".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let range = Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 12, + }, + }; + + let hints = inlay_hints(&doc, 
&analysis, range); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 7, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_function_parameter_hints_for_bind_function_when_enabled() { + let doc = Document::new( + "local add(x, y=1) = x + y; add(1, 2)".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::All, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 14, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_function_parameter_hints_for_expr_function_when_enabled() { + let doc = Document::new( + "local f = function(a, b=1) a + b; f(1, 2)".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::All, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: 
DestructuringHintsMode::Off, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 23, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_anonymous_function_return_hint_when_enabled() { + let doc = Document::new("(function(x) 1)(2)".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::All, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 12, + }, + label: InlayHintLabel::String(" -> number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_call_argument_hints_for_local_function_when_enabled() { + let doc = Document::new( + "local add(x, y=1) = x + y; add(1, 2)".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::All, + 
comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![ + InlayHint { + position: Position { + line: 0, + character: 31, + }, + label: InlayHintLabel::String("x:".to_string()), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: Some(true), + data: None, + }, + InlayHint { + position: Position { + line: 0, + character: 34, + }, + label: InlayHintLabel::String("y:".to_string()), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: Some(true), + data: None, + }, + ], + ); + } + + #[test] + fn test_call_argument_hints_skip_named_arguments() { + let doc = Document::new( + "local add(x, y=1) = x + y; add(1, y=2)".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::All, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 31, + }, + label: InlayHintLabel::String("x:".to_string()), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: Some(true), + data: None, + }], + ); + } + + #[test] + fn test_comprehension_binding_hint_when_enabled() { + let doc = Document::new( + "[x + 1 for x in std.range(1, 3)]".to_string(), + DocVersion::new(1), + ); + let analysis = 
test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::All, + destructuring: DestructuringHintsMode::Off, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 12, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_comprehension_destructuring_hints_require_destructuring_mode() { + let doc = Document::new( + "[a + b for [a, b] in [[1, 2]]]".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::All, + destructuring: DestructuringHintsMode::Off, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq(&hints, vec![]); + } + + #[test] + fn test_comprehension_destructuring_binding_hints_when_enabled() { + let doc = Document::new( + "[a + b for [a, b] in [[1, 2]]]".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: 
FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::All, + destructuring: DestructuringHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![ + InlayHint { + position: Position { + line: 0, + character: 13, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }, + InlayHint { + position: Position { + line: 0, + character: 16, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }, + ], + ); + } + + #[test] + fn test_destructuring_array_binding_hints_when_enabled() { + let doc = Document::new( + "local [a, b] = [1, 2]; a + b".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let bind_destruct = doc + .ast() + .syntax() + .descendants() + .find_map(BindDestruct::cast) + .expect("expected destruct bind"); + let value = bind_destruct.value().expect("expected bind value"); + assert!( + analysis + .type_for_range(value.syntax().text_range()) + .is_some(), + "destructuring RHS should have an inferred type" + ); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + 
&hints, + vec![ + InlayHint { + position: Position { + line: 0, + character: 8, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }, + InlayHint { + position: Position { + line: 0, + character: 11, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }, + ], + ); + } + + #[test] + fn test_destructuring_object_binding_hints_when_enabled() { + let doc = Document::new( + "local { foo: x } = { foo: 1 }; x".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let bind_destruct = doc + .ast() + .syntax() + .descendants() + .find_map(BindDestruct::cast) + .expect("expected destruct bind"); + let value = bind_destruct.value().expect("expected bind value"); + assert!( + analysis + .type_for_range(value.syntax().text_range()) + .is_some(), + "destructuring RHS should have an inferred type" + ); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 14, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs 
b/crates/jrsonnet-lsp-handlers/src/lib.rs new file mode 100644 index 00000000..9aa39749 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -0,0 +1,48 @@ +//! LSP request and notification handlers. + +pub mod code_action; +pub mod code_lens; +pub mod completion; +pub mod definition; +pub mod document_highlight; +pub mod formatting; +pub mod hover; +pub mod inlay_hint; +pub mod references; +pub mod rename; +pub mod semantic_tokens; +pub mod signature_help; +pub mod symbols; + +pub use code_action::{code_actions, CodeActionConfig, RemoveUnusedCommentsMode, RemoveUnusedMode}; +pub use code_lens::{code_lens, resolve_code_lens, CodeLensConfig}; +pub use completion::{ + completion, completion_with_import_roots, completion_with_import_roots_and_semantic, +}; +pub use definition::{ + collect_visible_bindings, collect_visible_bindings_with_semantic, goto_declaration, + goto_declaration_with_semantic, goto_definition, goto_definition_with_semantic, BindingKind, + DefinitionResult, VisibleBinding, +}; +pub use document_highlight::document_highlights; +pub use formatting::{ + format_document, format_document_range, format_document_range_with_config, + format_document_with_config, FormattingCommentStyle, FormattingConfig, FormattingStringStyle, +}; +pub use hover::{hover, hover_with_import_field_type}; +pub use inlay_hint::{ + inlay_hints, inlay_hints_with_config, AnonymousFunctionReturnHintsMode, CallArgumentHintsMode, + ComprehensionHintsMode, DestructuringHintsMode, FunctionParameterHintsMode, InlayHintsConfig, + LocalHintsMode, ObjectMemberHintsMode, +}; +pub use references::{ + find_cross_file_references, find_cross_file_references_with_semantic, find_references, + find_references_with_semantic, +}; +pub use rename::{prepare_rename, rename, rename_cross_file}; +pub use semantic_tokens::{ + legend as semantic_tokens_legend, semantic_token_reference_markdown, semantic_tokens, + semantic_tokens_range, SemanticTokenModifierName, SemanticTokenTypeName, +}; +pub use 
signature_help::signature_help; +pub use symbols::{document_symbols, workspace_symbols_for_document}; diff --git a/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs b/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs new file mode 100644 index 00000000..1c5a150a --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs @@ -0,0 +1,281 @@ +use jrsonnet_lsp_document::{ + to_lsp_range, token_at_offset, CanonicalPath, Document, FileId, LspPosition, +}; +use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_inference::SemanticArtifacts; +use jrsonnet_lsp_scope::{ + find_definition_range, is_at_file_scope, is_definition_site, is_variable_reference, +}; +use jrsonnet_rowan_parser::{ + nodes::{ExprBase, ExprField, ExprObject}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use lsp_types::Location; +use rayon::prelude::*; +use rowan::TextRange; + +/// Find cross-file references to a symbol. +/// +/// This function searches all provided documents for references to a symbol +/// that is exported from the current document (i.e., accessible via import). +/// +/// Returns references from other documents that import this file and use the symbol. +/// +/// The `documents` parameter is a slice of (path, document reference) pairs representing +/// all open documents to search. +#[must_use] +pub fn find_cross_file_references<'a>( + current_document: &Document, + current_path: &CanonicalPath, + position: LspPosition, + documents: &[(&'a CanonicalPath, &'a Document)], + import_graph: &ImportGraph, +) -> Vec { + let docs_with_semantic: Vec<_> = documents + .iter() + .map(|(path, doc)| (*path, *doc, None)) + .collect(); + find_cross_file_references_with_semantic( + current_document, + current_path, + position, + None, + &docs_with_semantic, + import_graph, + ) +} + +/// Find cross-file references using semantic artifacts when available. 
+#[must_use] +pub fn find_cross_file_references_with_semantic<'a>( + current_document: &Document, + current_path: &CanonicalPath, + position: LspPosition, + current_semantic: Option<&SemanticArtifacts>, + documents: &[( + &'a CanonicalPath, + &'a Document, + Option<&'a SemanticArtifacts>, + )], + import_graph: &ImportGraph, +) -> Vec { + let text = current_document.text(); + let line_index = current_document.line_index(); + + // Convert LSP position to byte offset + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + let ast = current_document.ast(); + + // Find the token at the offset + let Some(token) = token_at_offset(ast.syntax(), offset) else { + return Vec::new(); + }; + + // Must be an identifier + if token.kind() != SyntaxKind::IDENT { + return Vec::new(); + } + + let Some(name) = resolve_exported_symbol_name(current_document, &token, current_semantic) + else { + return Vec::new(); + }; + let Some(current_file) = import_graph.file(current_path) else { + return Vec::new(); + }; + + // Search all other documents for imports of this file (in parallel) + let references: Vec = documents + .par_iter() + .filter(|(doc_path, _, _)| *doc_path != current_path) + .flat_map(|(doc_path, doc, semantic)| { + let Some(importer_file) = import_graph.file(doc_path) else { + return Vec::new(); + }; + let import_bindings = import_binding_names(import_graph, importer_file, current_file); + if import_bindings.is_empty() { + return Vec::new(); + } + + let Ok(doc_uri) = doc_path.to_uri() else { + return Vec::new(); + }; + let doc_text = doc.text(); + let doc_line_index = doc.line_index(); + + import_bindings + .into_iter() + .flat_map(|binding_name| { + find_references_to_import(doc, &binding_name, &name, *semantic) + }) + .map(|range| Location { + uri: doc_uri.clone(), + range: to_lsp_range(range, doc_line_index, doc_text), + }) + .collect::>() + }) + .collect(); + + references +} + +fn import_binding_names( + import_graph: &ImportGraph, + 
importer_file: FileId, + target_file: FileId, +) -> Vec { + let mut bindings: Vec = import_graph + .imports_of_target(importer_file, target_file) + .into_iter() + .filter_map(|entry| entry.binding_name.clone()) + .collect(); + bindings.sort(); + bindings.dedup(); + bindings +} + +fn resolve_exported_symbol_name( + document: &Document, + token: &SyntaxToken, + semantic: Option<&SemanticArtifacts>, +) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + if is_top_level_object_field_definition(document, token) { + return Some(token.text().to_string()); + } + + if is_definition_site(token) && is_at_file_scope(token) { + return Some(token.text().to_string()); + } + + if !is_variable_reference(token) { + return None; + } + + let name = token.text(); + let definition_range = semantic + .and_then(|artifacts| artifacts.definition_for_ident_token(token)) + .or_else(|| find_definition_range(token, name))?; + let definition_token = definition_token(document, definition_range, name)?; + if !is_at_file_scope(&definition_token) { + return None; + } + + Some(name.to_string()) +} + +fn definition_token(document: &Document, range: TextRange, name: &str) -> Option { + document + .ast() + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|token| { + token.kind() == SyntaxKind::IDENT + && token.text() == name + && is_definition_site(token) + && token + .parent() + .is_some_and(|parent| parent.text_range() == range) + }) +} + +fn is_top_level_object_field_definition(document: &Document, token: &SyntaxToken) -> bool { + if token.kind() != SyntaxKind::IDENT || field_definition_range(token).is_none() { + return false; + } + + let Some(root_expr) = document.ast().expr() else { + return false; + }; + let Some(ExprBase::ExprObject(root_object)) = root_expr.expr_base() else { + return false; + }; + let Some(field_object) = token.parent_ancestors().find_map(ExprObject::cast) else { + return false; + }; + + 
field_object.syntax().text_range() == root_object.syntax().text_range() +} + +fn field_definition_range(token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut node = token.parent(); + while let Some(current) = node { + if current.kind() == SyntaxKind::FIELD_NAME_FIXED { + let parent = current.parent()?; + if matches!( + parent.kind(), + SyntaxKind::MEMBER_FIELD_NORMAL | SyntaxKind::MEMBER_FIELD_METHOD + ) { + return Some(token.text_range()); + } + return None; + } + node = current.parent(); + } + + None +} + +/// Find references to an imported symbol in a document. +fn find_references_to_import( + doc: &Document, + binding_name: &str, + field_name: &str, + semantic: Option<&SemanticArtifacts>, +) -> Vec { + if let Some(artifacts) = semantic { + return artifacts + .import_field_references(binding_name, field_name) + .to_vec(); + } + + let mut references = Vec::new(); + let ast = doc.ast(); + + // Walk all tokens looking for field accesses on the imported name + for node in ast.syntax().descendants() { + // Look for field accesses: importName.fieldName + if node.kind() == SyntaxKind::EXPR_FIELD { + // Check if this is accessing the imported binding + if let Some(range) = check_field_access(&node, binding_name, field_name) { + references.push(range); + } + } + } + + references +} + +/// Check if a field access is accessing a specific field on a specific binding. 
+fn check_field_access( + node: &SyntaxNode, + binding_name: &str, + field_name: &str, +) -> Option { + let field = ExprField::cast(node.clone())?; + let field_ident = field.field()?.ident_lit()?; + if field_ident.text() != field_name { + return None; + } + + let base = field.base()?.expr_base()?; + let ExprBase::ExprVar(var) = base else { + return None; + }; + if var.name()?.ident_lit()?.text() != binding_name { + return None; + } + + Some(field_ident.text_range()) +} diff --git a/crates/jrsonnet-lsp-handlers/src/references/local.rs b/crates/jrsonnet-lsp-handlers/src/references/local.rs new file mode 100644 index 00000000..6069ee74 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/references/local.rs @@ -0,0 +1,404 @@ +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document, LspPosition}; +use jrsonnet_lsp_inference::SemanticArtifacts; +use jrsonnet_lsp_scope::{ + find_definition_range, is_definition_site, is_variable_reference, ScopeResolver, +}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind, SyntaxToken}; +use lsp_types::{Location, Uri}; +use rowan::TextRange; +use tracing::debug; + +/// Find all references to the symbol at the given position. +pub fn find_references( + document: &Document, + position: LspPosition, + uri: &Uri, + include_declaration: bool, +) -> Vec { + find_references_with_semantic(document, position, uri, include_declaration, None) +} + +/// Find all references to the symbol at the given position using semantic artifacts when available. 
+pub fn find_references_with_semantic( + document: &Document, + position: LspPosition, + uri: &Uri, + include_declaration: bool, + semantic: Option<&SemanticArtifacts>, +) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + let ast = document.ast(); + + let Some(token) = token_at_offset(ast.syntax(), offset) else { + return Vec::new(); + }; + + if token.kind() != SyntaxKind::IDENT { + return Vec::new(); + } + + let name = token.text(); + + if let Some(field_definition) = field_definition_range(&token) { + let references = if include_declaration { + vec![field_definition] + } else { + Vec::new() + }; + return references + .into_iter() + .map(|range| Location { + uri: uri.clone(), + range: to_lsp_range(range, line_index, text), + }) + .collect(); + } + + let definition_range = semantic + .and_then(|artifacts| artifacts.definition_for_ident_token(&token)) + .or_else(|| { + if is_definition_site(&token) { + token.parent().map(|p| p.text_range()) + } else if is_variable_reference(&token) { + find_definition_range(&token, name) + } else { + None + } + }); + + let Some(def_range) = definition_range else { + return Vec::new(); + }; + + let mut references = semantic.map_or_else( + || { + // Build scope resolver for O(1) lookups + let resolver = ScopeResolver::new(ast.syntax()); + resolver.find_references(ast.syntax(), name, def_range) + }, + |artifacts| artifacts.references_for_definition(def_range).to_vec(), + ); + + if !include_declaration { + references.retain(|r| *r != def_range); + } + + debug!(name = %name, count = references.len(), "found references"); + + references + .into_iter() + .map(|range| Location { + uri: uri.clone(), + range: to_lsp_range(range, line_index, text), + }) + .collect() +} + +fn field_definition_range(token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut node = 
token.parent(); + while let Some(current) = node { + if current.kind() == SyntaxKind::FIELD_NAME_FIXED { + let parent = current.parent()?; + if matches!( + parent.kind(), + SyntaxKind::MEMBER_FIELD_NORMAL | SyntaxKind::MEMBER_FIELD_METHOD + ) { + return Some(token.text_range()); + } + return None; + } + node = current.parent(); + } + + None +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_scope::is_at_file_scope; + + use super::*; + + #[test] + fn test_find_local_variable_references() { + let code = "local x = 1; local y = x + x; x"; + // ^def ^ref ^ref ^ref + // 0123456789... + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Position on the definition of x + let pos = (0, 6).into(); + + let refs = find_references(&doc, pos, &uri, true); + // x at positions: 6 (def), 23, 27, 30 + assert_eq!( + refs, + vec![ + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + }, + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 23 + }, + end: lsp_types::Position { + line: 0, + character: 24 + }, + }, + }, + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 27 + }, + end: lsp_types::Position { + line: 0, + character: 28 + }, + }, + }, + Location { + uri, + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 30 + }, + end: lsp_types::Position { + line: 0, + character: 31 + }, + }, + }, + ] + ); + } + + #[test] + fn test_find_references_exclude_declaration() { + let code = "local x = 1; x + x"; + // ^def ^ref ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let pos = (0, 6).into(); + + let 
refs = find_references(&doc, pos, &uri, false); + // Only the two uses at positions 13 and 17, not the definition + assert_eq!( + refs, + vec![ + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + }, + Location { + uri, + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 17 + }, + end: lsp_types::Position { + line: 0, + character: 18 + }, + }, + }, + ] + ); + } + + #[test] + fn test_find_parameter_references() { + let code = "local f(x) = x * x; f(1)"; + // ^def ^ref ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Position on parameter x + let pos = (0, 8).into(); + + let refs = find_references(&doc, pos, &uri, true); + // x at positions: 8 (def), 13, 17 + assert_eq!( + refs, + vec![ + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 8 + }, + end: lsp_types::Position { + line: 0, + character: 9 + }, + }, + }, + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + }, + Location { + uri, + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 17 + }, + end: lsp_types::Position { + line: 0, + character: 18 + }, + }, + }, + ] + ); + } + + #[test] + fn test_no_references_for_different_scope() { + let code = "local x = 1; local f(x) = x; x"; + // ^def1 ^def2 ^ref2 ^ref1 + // The last 'x' refers to def1, not def2 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Position on the outer x definition + let pos = (0, 6).into(); + + let refs = find_references(&doc, pos, &uri, true); + // Should find: the definition at 6 
and the last reference at 29 (not the inner x) + assert_eq!( + refs, + vec![ + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + }, + Location { + uri, + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 29 + }, + end: lsp_types::Position { + line: 0, + character: 30 + }, + }, + }, + ] + ); + } + + #[test] + fn test_is_at_file_scope() { + // Test file-scope detection helper + let code = "local x = 1; x"; + // ^file scope + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the 'x' identifier at the definition site + for t in ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if t.kind() == SyntaxKind::IDENT && t.text() == "x" && is_definition_site(&t) { + // The first x (definition) should be at file scope + assert!( + is_at_file_scope(&t), + "Top-level local should be at file scope" + ); + return; + } + } + panic!("Did not find definition token"); + } + + #[test] + fn test_nested_not_at_file_scope() { + // Nested definitions should not be at file scope + let code = "local f(x) = local y = 1; y; f(1)"; + // ^not file scope + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the 'y' identifier at the definition site (inside function) + for t in ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if t.kind() == SyntaxKind::IDENT && t.text() == "y" && is_definition_site(&t) { + // y is nested, should not be at file scope + assert!( + !is_at_file_scope(&t), + "Nested local should not be at file scope" + ); + return; + } + } + panic!("Did not find definition token"); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/references/mod.rs b/crates/jrsonnet-lsp-handlers/src/references/mod.rs new file mode 100644 index 
00000000..fb788b70 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/references/mod.rs @@ -0,0 +1,10 @@ +//! Find references handler. +//! +//! Finds all references to a symbol within the current document and across +//! all open/importing documents (cross-file references). + +mod cross_file; +mod local; + +pub use cross_file::{find_cross_file_references, find_cross_file_references_with_semantic}; +pub use local::{find_references, find_references_with_semantic}; diff --git a/crates/jrsonnet-lsp-handlers/src/rename/common.rs b/crates/jrsonnet-lsp-handlers/src/rename/common.rs new file mode 100644 index 00000000..17f3158a --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/rename/common.rs @@ -0,0 +1,44 @@ +use std::collections::HashMap; + +use lsp_types::{TextEdit, Uri, WorkspaceEdit}; + +pub(super) fn workspace_edit_from_changes( + changes: HashMap>, +) -> Option { + if changes.is_empty() { + return None; + } + + Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }) +} + +pub(super) fn field_definition_range( + token: &jrsonnet_rowan_parser::SyntaxToken, +) -> Option { + use jrsonnet_rowan_parser::SyntaxKind; + + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut node = token.parent(); + while let Some(current) = node { + if current.kind() == SyntaxKind::FIELD_NAME_FIXED { + let parent = current.parent()?; + if matches!( + parent.kind(), + SyntaxKind::MEMBER_FIELD_NORMAL | SyntaxKind::MEMBER_FIELD_METHOD + ) { + return Some(token.text_range()); + } + return None; + } + node = current.parent(); + } + + None +} diff --git a/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs b/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs new file mode 100644 index 00000000..7e9ed985 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs @@ -0,0 +1,333 @@ +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; + +use jrsonnet_lsp_document::{ + to_lsp_range, token_at_offset, 
CanonicalPath, Document, FileId, LspPosition, SymbolName, +}; +use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_inference::DocumentManager; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{TextEdit, Uri, WorkspaceEdit}; + +use super::{ + common::{field_definition_range, workspace_edit_from_changes}, + local::rename, +}; + +/// Perform cross-file rename operation. +/// +/// This extends the basic rename to also find references in files that import +/// the current file. For top-level object fields, it finds field accesses +/// in importing files. +/// +/// # Arguments +/// - `document`: The current document +/// - `position`: Cursor position +/// - `new_name`: The new name for the symbol +/// - `uri`: URI of the current document +/// - `current_path`: Canonical path of the current file +/// - `manager`: Document manager for accessing other files +/// - `import_graph`: Import graph for finding importing files +/// +/// # Returns +/// A `WorkspaceEdit` with changes across all affected files. 
+pub fn rename_cross_file( + document: &Document, + position: LspPosition, + new_name: &SymbolName, + uri: &Uri, + current_path: &CanonicalPath, + manager: &Arc, + import_graph: &ImportGraph, +) -> Option { + // First, do the local rename to get edits for the current file + let mut all_changes: HashMap> = HashMap::new(); + + // Get local edits + if let Some(local_edit) = rename(document, position, new_name, uri) { + if let Some(changes) = local_edit.changes { + all_changes.extend(changes); + } + } + + // Find the symbol name being renamed + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(position, text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + if let Some(range) = field_definition_range(&token) { + all_changes.entry(uri.clone()).or_default().push(TextEdit { + range: to_lsp_range(range, line_index, text), + new_text: new_name.to_string(), + }); + } + + if token.kind() != SyntaxKind::IDENT { + return workspace_edit_from_changes(all_changes); + } + + let old_name = token.text().to_string(); + let Some(current_file) = import_graph.file(current_path) else { + return workspace_edit_from_changes(all_changes); + }; + + // Find files that import this file + let importers = import_graph.transitive_importers(current_file); + + // Find references in each importing file + for importer_file in importers { + if let Some((importer_uri, edits)) = find_references_in_importer( + importer_file, + current_file, + &old_name, + new_name, + manager, + import_graph, + ) { + all_changes.entry(importer_uri).or_default().extend(edits); + } + } + + workspace_edit_from_changes(all_changes) +} + +fn import_binding_names( + import_graph: &ImportGraph, + importer_file: FileId, + source_file: FileId, +) -> HashSet { + import_graph + .imports_of_target(importer_file, source_file) + .into_iter() + .filter_map(|entry| entry.binding_name.clone()) + .collect() +} + +/// Find references to a symbol in a 
file that imports the source file. +/// +/// This looks for patterns like: +/// ```jsonnet +/// local lib = import 'source.jsonnet'; +/// lib.field_name // This is a reference to field_name in source.jsonnet +/// ``` +fn find_references_in_importer( + importer_file: FileId, + source_file: FileId, + old_name: &str, + new_name: &SymbolName, + manager: &Arc, + import_graph: &ImportGraph, +) -> Option<(Uri, Vec)> { + use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField}; + + let importer_path = import_graph.path(importer_file)?; + let path = importer_path.as_ref(); + let doc = manager.get_document(path)?; + let uri = path.to_uri().ok()?; + let text = doc.text(); + let line_index = doc.line_index(); + let ast = doc.ast(); + + let mut edits = Vec::new(); + let import_bindings = import_binding_names(import_graph, importer_file, source_file); + + if import_bindings.is_empty() { + return None; + } + + // Find field accesses on the import bindings that match old_name + // ExprField represents obj.field syntax + for node in ast.syntax().descendants() { + if node.kind() != SyntaxKind::EXPR_FIELD { + continue; + } + + let Some(field_expr) = ExprField::cast(node) else { + continue; + }; + + // Check if the field name matches + let Some(field_name) = field_expr.field() else { + continue; + }; + + let Some(field_ident) = field_name.ident_lit() else { + continue; + }; + + if field_ident.text() != old_name { + continue; + } + + // Check if the base expression is one of our import bindings + let Some(base_expr) = field_expr.base() else { + continue; + }; + + let Some(base) = base_expr.expr_base() else { + continue; + }; + + let ExprBase::ExprVar(var) = base else { + continue; + }; + + let Some(var_name) = var.name() else { + continue; + }; + + let Some(var_ident) = var_name.ident_lit() else { + continue; + }; + + if !import_bindings.contains(var_ident.text()) { + continue; + } + + // Found a field access on an import binding matching old_name + edits.push(TextEdit { + range: 
to_lsp_range(field_ident.text_range(), line_index, text), + new_text: new_name.to_string(), + }); + } + + if edits.is_empty() { + None + } else { + Some((uri, edits)) + } +} + +#[cfg(test)] +mod tests { + use std::io::Write; + + use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_types::GlobalTyStore; + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_cross_file_rename_field_access() { + // Create a temp directory with two files + let temp_dir = TempDir::new().unwrap(); + let lib_path = temp_dir.path().join("lib.jsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + // lib.jsonnet: exports an object with a `helper` field + let lib_code = r"{ helper: function(x) x * 2 }"; + std::fs::File::create(&lib_path) + .unwrap() + .write_all(lib_code.as_bytes()) + .unwrap(); + + // main.jsonnet: imports lib and uses lib.helper + let main_code = r#"local lib = import "lib.jsonnet"; lib.helper(5)"#; + std::fs::File::create(&main_path) + .unwrap() + .write_all(main_code.as_bytes()) + .unwrap(); + + // Create the manager and import graph + let global = Arc::new(GlobalTyStore::new()); + let path_store = jrsonnet_lsp_document::PathStore::new(); + let manager = Arc::new(DocumentManager::new(global, path_store.clone())); + + let lib_canon = CanonicalPath::new(lib_path); + let main_canon = CanonicalPath::new(main_path); + + // Open both documents + manager.open(lib_canon.clone(), lib_code.to_string(), DocVersion::new(1)); + manager.open( + main_canon.clone(), + main_code.to_string(), + DocVersion::new(1), + ); + + // Build import graph + let mut import_graph = ImportGraph::new(path_store); + if let Some(main_doc) = manager.get_document(&main_canon) { + let entries = jrsonnet_lsp_import::parse_document_imports(&main_doc, &|import_path| { + let import_full = temp_dir.path().join(import_path); + import_full.canonicalize().ok().map(CanonicalPath::new) + }); + let main_file = import_graph.intern(&main_canon); + let mut entries = entries; + 
import_graph.resolve_entry_files(&mut entries); + import_graph.update_file_with_entries(main_file, entries); + } + + // Get the lib document + let lib_doc = manager.get_document(&lib_canon).unwrap(); + let lib_uri = lib_canon.to_uri().expect("lib URI should be valid"); + + // Rename 'helper' in lib.jsonnet (position 2 is the 'h' in 'helper') + // This is an object field, not a local variable, so local rename won't work + // but cross-file rename should find `lib.helper` in main.jsonnet + let pos = (0, 2).into(); + let new_name = SymbolName::new("util").unwrap(); + + let result = rename_cross_file( + &lib_doc, + pos, + &new_name, + &lib_uri, + &lib_canon, + &manager, + &import_graph, + ); + + // Cross-file rename should find `lib.helper` in main.jsonnet + let edit = result.expect("should produce workspace edit"); + let changes = edit.changes.expect("should have changes"); + + // Should rename the source field definition and importer field access. + // lib_code: { helper: function(x) x * 2 } + // ^^^^^^ + // position: 2 8 + // main_code: local lib = import "lib.jsonnet"; lib.helper(5) + // ^^^^^^ + // position: 38 44 + let mut expected_changes: HashMap> = HashMap::new(); + expected_changes.insert( + lib_uri, + vec![TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 2, + }, + end: lsp_types::Position { + line: 0, + character: 8, + }, + }, + new_text: "util".to_string(), + }], + ); + + let main_uri = main_canon.to_uri().expect("main URI should be valid"); + expected_changes.insert( + main_uri, + vec![TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 38, + }, + end: lsp_types::Position { + line: 0, + character: 44, + }, + }, + new_text: "util".to_string(), + }], + ); + assert_eq!(changes, expected_changes); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/rename/local.rs b/crates/jrsonnet-lsp-handlers/src/rename/local.rs new file mode 100644 index 00000000..bd259b64 --- /dev/null 
+++ b/crates/jrsonnet-lsp-handlers/src/rename/local.rs @@ -0,0 +1,435 @@ +use std::collections::HashMap; + +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document, LspPosition, SymbolName}; +use jrsonnet_lsp_scope::{ + find_definition_range, is_definition_site, is_renameable, is_variable_reference, ScopeResolver, +}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{PrepareRenameResponse, TextEdit, Uri, WorkspaceEdit}; + +use super::common::{field_definition_range, workspace_edit_from_changes}; + +/// Prepare rename response. +/// Returns the range of the symbol to be renamed and its current name. +#[must_use] +pub fn prepare_rename(document: &Document, position: LspPosition) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let offset = line_index.offset(position, text)?; + + let ast = document.ast(); + + // Find the token at the offset + let token = token_at_offset(ast.syntax(), offset)?; + + // Must be an identifier + if token.kind() != SyntaxKind::IDENT { + return None; + } + + // Check if this is a renameable symbol (definition or reference to a local) + if !is_renameable(&token) && field_definition_range(&token).is_none() { + return None; + } + + let range = to_lsp_range(token.text_range(), line_index, text); + + Some(PrepareRenameResponse::Range(range)) +} + +/// Perform rename operation. +/// Returns a workspace edit with all text edits needed. +/// +/// The `new_name` parameter is a validated `SymbolName`, ensuring that +/// validation happens at the API boundary (in server.rs) before this +/// function is called. 
+/// +/// Returns `None` if: +/// - The position is not on an identifier +/// - No references are found +pub fn rename( + document: &Document, + position: LspPosition, + new_name: &SymbolName, + uri: &Uri, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(position, text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text(); + + let definition_range = if is_definition_site(&token) { + token.parent().map(|p| p.text_range()) + } else if is_variable_reference(&token) { + find_definition_range(&token, name) + } else { + return None; + }; + + let def_range = definition_range?; + + let resolver = ScopeResolver::new(ast.syntax()); + let references = resolver.find_references(ast.syntax(), name, def_range); + + if references.is_empty() { + return None; + } + + // SymbolName derefs to String, so we can clone it for each edit + let new_name_str = new_name.to_string(); + + let edits: Vec = references + .into_iter() + .map(|range| TextEdit { + range: to_lsp_range(range, line_index, text), + new_text: new_name_str.clone(), + }) + .collect(); + + let mut changes = HashMap::new(); + changes.insert(uri.clone(), edits); + + workspace_edit_from_changes(changes) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + use rstest::rstest; + + use super::*; + + #[test] + fn test_prepare_rename_on_definition() { + let code = "local x = 1; x"; + // ^def (position 6) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 6).into(); + + let result = prepare_rename(&doc, pos); + // 'x' spans characters 6-7 + assert_eq!( + result, + Some(PrepareRenameResponse::Range(lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + })) + ); + } + + #[test] + fn 
test_prepare_rename_on_reference() { + let code = "local x = 1; x + x"; + // ^ref (position 13) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 13).into(); + + let result = prepare_rename(&doc, pos); + // 'x' at position 13 spans characters 13-14 + assert_eq!( + result, + Some(PrepareRenameResponse::Range(lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + })) + ); + } + + #[test] + fn test_prepare_rename_on_field_definition() { + let code = "{ helper: 1 }"; + // ^ field (position 2) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 2).into(); + + let result = prepare_rename(&doc, pos); + assert_eq!( + result, + Some(PrepareRenameResponse::Range(lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 2 + }, + end: lsp_types::Position { + line: 0, + character: 8 + }, + })) + ); + } + + #[test] + fn test_rename_local_variable() { + let code = "local x = 1; x + x"; + // ^def ^ref ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Rename 'x' to 'y' from the definition + let pos = (0, 6).into(); + let new_name = SymbolName::new("y").unwrap(); + + let edit = rename(&doc, pos, &new_name, &uri).expect("rename should succeed"); + let changes = edit.changes.expect("should have changes"); + let edits = changes.get(&uri).expect("should have edits for uri"); + + // Edits for x at positions 6, 13, 17 + assert_eq!( + edits, + &vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + new_text: "y".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + new_text: 
"y".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 17 + }, + end: lsp_types::Position { + line: 0, + character: 18 + }, + }, + new_text: "y".to_string(), + }, + ] + ); + } + + #[test] + fn test_rename_function_parameter() { + let code = "local f(x) = x * x; f(1)"; + // ^def ^ref ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Rename 'x' to 'n' from a reference + let pos = (0, 13).into(); + let new_name = SymbolName::new("n").unwrap(); + + let edit = rename(&doc, pos, &new_name, &uri).expect("rename should succeed"); + let changes = edit.changes.expect("should have changes"); + let edits = changes.get(&uri).expect("should have edits for uri"); + + // Edits for x at positions 8 (param), 13, 17 + assert_eq!( + edits, + &vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 8 + }, + end: lsp_types::Position { + line: 0, + character: 9 + }, + }, + new_text: "n".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + new_text: "n".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 17 + }, + end: lsp_types::Position { + line: 0, + character: 18 + }, + }, + new_text: "n".to_string(), + }, + ] + ); + } + + #[test] + fn test_rename_respects_scope() { + let code = "local x = 1; local f(x) = x; x"; + // ^def1 ^def2 ^ref2 ^ref1 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Rename the outer x + let pos = (0, 6).into(); + let new_name = SymbolName::new("y").unwrap(); + + let edit = rename(&doc, pos, &new_name, &uri).expect("rename should succeed"); + let changes = edit.changes.expect("should have 
changes"); + let edits = changes.get(&uri).expect("should have edits for uri"); + + // Only edits for outer x at positions 6 (def) and 29 (final ref), not the inner scope + assert_eq!( + edits, + &vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + new_text: "y".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 29 + }, + end: lsp_types::Position { + line: 0, + character: 30 + }, + }, + new_text: "y".to_string(), + }, + ] + ); + } + + #[test] + fn test_no_rename_for_non_identifier() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position on '=' (not an identifier) + let pos = (0, 8).into(); + + let result = prepare_rename(&doc, pos); + assert_eq!(result, None); + } + + #[rstest] + #[case("123foo")] + #[case("foo-bar")] + #[case("local")] + #[case("")] + fn test_symbol_name_rejects_invalid_identifier(#[case] new_name: &str) { + use assert_matches::assert_matches; + use jrsonnet_lsp_document::LspError; + + // Validation happens at the boundary when creating SymbolName + let err = SymbolName::new(new_name).expect_err("should reject invalid identifier"); + assert_matches!(err, LspError::InvalidIdentifier(_)); + } + + #[rstest] + #[case("newName")] + #[case("new_name")] + #[case("_private")] + #[case("y")] + fn test_rename_accepts_valid_identifier(#[case] new_name: &str) { + // code: local x = 1; x + // ^def ^ref (positions 6 and 13) + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + let pos = (0, 6).into(); + let symbol_name = SymbolName::new(new_name).expect("should be valid identifier"); + + let edit = rename(&doc, pos, &symbol_name, &uri).expect("rename should succeed"); + let changes = edit.changes.expect("should have 
changes"); + let edits = changes.get(&uri).expect("should have edits for uri"); + + // x at positions 6-7 (def) and 13-14 (ref) + assert_eq!( + edits, + &vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + new_text: new_name.to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + new_text: new_name.to_string(), + }, + ] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/rename/mod.rs b/crates/jrsonnet-lsp-handlers/src/rename/mod.rs new file mode 100644 index 00000000..bf5ee0a5 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/rename/mod.rs @@ -0,0 +1,10 @@ +//! Rename handlers. +//! +//! Provides local and cross-file rename functionality. + +mod common; +mod cross_file; +mod local; + +pub use cross_file::rename_cross_file; +pub use local::{prepare_rename, rename}; diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs new file mode 100644 index 00000000..364661a3 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs @@ -0,0 +1,143 @@ +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; +use jrsonnet_rowan_parser::{ + nodes::{BindFunction, Destruct, ExprField, ParamsDesc, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; + +use super::{SemanticTokenModifierName, SemanticTokenTypeName}; + +/// Classify a variable reference to determine its token type. +pub(super) fn classify_variable_reference(token: &SyntaxToken) -> SemanticTokenTypeName { + // Walk up the scope chain to find the definition. 
+ let Some(mut current) = token.parent() else { + return SemanticTokenTypeName::Variable; + }; + + let name = token.text(); + + while let Some(parent) = current.parent() { + if is_parameter_in_scope(&parent, name) { + return SemanticTokenTypeName::Parameter; + } + if is_function_in_scope(&parent, ¤t, name) { + return SemanticTokenTypeName::Function; + } + current = parent; + } + + SemanticTokenTypeName::Variable +} + +/// Classify a definition site. +pub(super) fn classify_definition_site(destruct_node: &SyntaxNode) -> (SemanticTokenTypeName, u32) { + // Walk up to find if this is a parameter or local variable. + let mut current = destruct_node.clone(); + + while let Some(parent) = current.parent() { + match parent.kind() { + SyntaxKind::PARAM => { + return ( + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } + SyntaxKind::BIND_DESTRUCT | SyntaxKind::FOR_SPEC => { + return ( + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } + _ => {} + } + current = parent; + } + + ( + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ) +} + +/// Check if a field access is on builtin std. +pub(super) fn is_stdlib_access(expr_field: &SyntaxNode) -> bool { + let Some(field) = ExprField::cast(expr_field.clone()) else { + return false; + }; + let Some(base) = field.base() else { + return false; + }; + expr_resolves_to_builtin_std(&base) +} + +/// Check if a name is a parameter in the given scope. 
+fn is_parameter_in_scope(scope: &SyntaxNode, name: &str) -> bool { + match scope.kind() { + SyntaxKind::EXPR_FUNCTION => { + if let Some(func) = jrsonnet_rowan_parser::nodes::ExprFunction::cast(scope.clone()) { + if let Some(params) = func.params_desc() { + return params_contain_name(¶ms, name); + } + } + } + SyntaxKind::BIND_FUNCTION => { + if let Some(func) = BindFunction::cast(scope.clone()) { + if let Some(params) = func.params() { + return params_contain_name(¶ms, name); + } + } + } + _ => {} + } + false +} + +/// Check if params contain a given name. +fn params_contain_name(params: &ParamsDesc, name: &str) -> bool { + for param in params.params() { + if let Some(destruct) = param.destruct() { + if let Destruct::DestructFull(full) = destruct { + if let Some(param_name) = full.name() { + if let Some(ident) = param_name.ident_lit() { + if ident.text() == name { + return true; + } + } + } + } + } + } + false +} + +/// Check if a name is a function defined in the given scope. +fn is_function_in_scope(scope: &SyntaxNode, child: &SyntaxNode, name: &str) -> bool { + if scope.kind() != SyntaxKind::EXPR { + return false; + } + + for stmt_node in scope.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + if stmt_node.text_range().end() > child.text_range().start() { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let jrsonnet_rowan_parser::nodes::Bind::BindFunction(bf) = bind { + if let Some(bind_name) = bf.name() { + if let Some(ident) = bind_name.ident_lit() { + if ident.text() == name { + return true; + } + } + } + } + } + } + } + } + false +} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs new file mode 100644 index 00000000..3b83eac4 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs @@ -0,0 +1,411 @@ +use jrsonnet_lsp_document::{Document, LineIndex}; +use 
jrsonnet_rowan_parser::AstNode; +use lsp_types::{Range, SemanticToken, SemanticTokens}; + +use super::{walk, SemanticTokenTypeName}; + +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} + +/// Compute semantic tokens for a document. +#[must_use] +pub fn semantic_tokens(document: &Document) -> SemanticTokens { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let mut builder = SemanticTokenBuilder::new(line_index, text, None); + + // Walk all tokens in the document. + for element in ast.syntax().descendants_with_tokens() { + if let Some(token) = element.into_token() { + walk::visit_token(&mut builder, &token); + } + } + + builder.build() +} + +/// Compute semantic tokens for a specific range in a document. +#[must_use] +pub fn semantic_tokens_range(document: &Document, range: Range) -> SemanticTokens { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let mut builder = SemanticTokenBuilder::new(line_index, text, Some(range)); + + // Walk all tokens in the document. + for element in ast.syntax().descendants_with_tokens() { + if let Some(token) = element.into_token() { + walk::visit_token(&mut builder, &token); + } + } + + builder.build() +} + +/// Builder for semantic tokens. +pub(super) struct SemanticTokenBuilder<'a> { + line_index: &'a LineIndex, + text: &'a str, + range: Option, + tokens: Vec, +} + +/// Raw token before delta encoding. 
+struct RawToken { + line: u32, + start_char: u32, + length: u32, + token_type: u32, + token_modifiers: u32, +} + +impl<'a> SemanticTokenBuilder<'a> { + pub(super) fn new(line_index: &'a LineIndex, text: &'a str, range: Option) -> Self { + Self { + line_index, + text, + range, + tokens: Vec::new(), + } + } + + pub(super) fn add_token( + &mut self, + token: &jrsonnet_rowan_parser::SyntaxToken, + token_type: SemanticTokenTypeName, + token_modifiers: u32, + ) { + let range = token.text_range(); + let start_pos = self + .line_index + .position(range.start().into(), self.text) + .unwrap_or_default(); + + // Handle multi-line tokens (like block strings/comments). + let token_text = token.text(); + let lines: Vec<&str> = token_text.lines().collect(); + let token_type_u32 = token_type as u32; + + if lines.len() <= 1 { + // Single line token. + self.push_token_if_in_range(RawToken { + line: start_pos.line.0, + start_char: start_pos.character.0, + length: to_u32(token_text.len()), + token_type: token_type_u32, + token_modifiers, + }); + } else { + // Multi-line token: emit one token per line. 
+ for (i, line) in lines.iter().enumerate() { + let line_num = start_pos.line.0.saturating_add(to_u32(i)); + let start_char = if i == 0 { start_pos.character.0 } else { 0 }; + let length = to_u32(line.len()); + + if length > 0 { + self.push_token_if_in_range(RawToken { + line: line_num, + start_char, + length, + token_type: token_type_u32, + token_modifiers, + }); + } + } + } + } + + fn push_token_if_in_range(&mut self, token: RawToken) { + let Some(range) = &self.range else { + self.tokens.push(token); + return; + }; + + let token_end = token.start_char.saturating_add(token.length); + if token.line < range.start.line || token.line > range.end.line { + return; + } + if token.line == range.start.line && token_end <= range.start.character { + return; + } + if token.line == range.end.line && token.start_char >= range.end.character { + return; + } + + self.tokens.push(token); + } + + fn build(mut self) -> SemanticTokens { + // Sort tokens by position. + self.tokens + .sort_unstable_by(|a, b| (a.line, a.start_char).cmp(&(b.line, b.start_char))); + + // Convert to delta-encoded SemanticToken format. 
+ let mut data = Vec::with_capacity(self.tokens.len()); + let mut prev_line = 0u32; + let mut prev_char = 0u32; + + for token in &self.tokens { + let delta_line = token.line - prev_line; + let delta_start = if delta_line == 0 { + token.start_char - prev_char + } else { + token.start_char + }; + + data.push(SemanticToken { + delta_line, + delta_start, + length: token.length, + token_type: token.token_type, + token_modifiers_bitset: token.token_modifiers, + }); + + prev_line = token.line; + prev_char = token.start_char; + } + + SemanticTokens { + result_id: None, + data, + } + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::{Range, SemanticTokens}; + + use super::{semantic_tokens, semantic_tokens_range}; + use crate::semantic_tokens::{SemanticTokenModifierName, SemanticTokenTypeName}; + + #[derive(Debug, Clone, PartialEq, Eq)] + struct AbsoluteToken { + line: u32, + start_char: u32, + length: u32, + token_type: u32, + token_modifiers: u32, + } + + fn token( + line: u32, + start_char: u32, + length: u32, + token_type: SemanticTokenTypeName, + token_modifiers: u32, + ) -> AbsoluteToken { + AbsoluteToken { + line, + start_char, + length, + token_type: token_type as u32, + token_modifiers, + } + } + + fn decode_absolute(tokens: &SemanticTokens) -> Vec { + let mut line = 0_u32; + let mut start_char = 0_u32; + let mut out = Vec::with_capacity(tokens.data.len()); + + for token in &tokens.data { + line = line.saturating_add(token.delta_line); + start_char = if token.delta_line == 0 { + start_char.saturating_add(token.delta_start) + } else { + token.delta_start + }; + out.push(AbsoluteToken { + line, + start_char, + length: token.length, + token_type: token.token_type, + token_modifiers: token.token_modifiers_bitset, + }); + } + + out + } + + #[test] + fn test_semantic_tokens_keywords() { + let code = "local x = if true then 1 else 2; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = 
semantic_tokens(&doc); + assert_eq!( + decode_absolute(&tokens), + vec![ + token(0, 0, 5, SemanticTokenTypeName::Keyword, 0), + token( + 0, + 6, + 1, + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token(0, 8, 1, SemanticTokenTypeName::Operator, 0), + token(0, 10, 2, SemanticTokenTypeName::Keyword, 0), + token(0, 13, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 18, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 23, 1, SemanticTokenTypeName::Number, 0), + token(0, 25, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 30, 1, SemanticTokenTypeName::Number, 0), + token(0, 33, 1, SemanticTokenTypeName::Variable, 0), + ] + ); + } + + #[test] + fn test_semantic_tokens_function() { + let code = "local add(a, b) = a + b; add(1, 2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert_eq!( + decode_absolute(&tokens), + vec![ + token(0, 0, 5, SemanticTokenTypeName::Keyword, 0), + token( + 0, + 6, + 3, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token( + 0, + 10, + 1, + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token( + 0, + 13, + 1, + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token(0, 16, 1, SemanticTokenTypeName::Operator, 0), + token(0, 18, 1, SemanticTokenTypeName::Parameter, 0), + token(0, 20, 1, SemanticTokenTypeName::Operator, 0), + token(0, 22, 1, SemanticTokenTypeName::Parameter, 0), + token(0, 25, 3, SemanticTokenTypeName::Function, 0), + token(0, 29, 1, SemanticTokenTypeName::Number, 0), + token(0, 32, 1, SemanticTokenTypeName::Number, 0), + ] + ); + } + + #[test] + fn 
test_semantic_tokens_object() { + let code = r#"{ name: "test", greet(x): "Hello " + x }"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert_eq!( + decode_absolute(&tokens), + vec![ + token( + 0, + 2, + 4, + SemanticTokenTypeName::Property, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token(0, 8, 6, SemanticTokenTypeName::String, 0), + token( + 0, + 16, + 5, + SemanticTokenTypeName::Method, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token( + 0, + 22, + 1, + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token(0, 26, 8, SemanticTokenTypeName::String, 0), + token(0, 35, 1, SemanticTokenTypeName::Operator, 0), + token(0, 37, 1, SemanticTokenTypeName::Variable, 0), + ] + ); + } + + #[test] + fn test_semantic_tokens_stdlib() { + let code = "std.length([1, 2, 3])"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert_eq!( + decode_absolute(&tokens), + vec![ + token( + 0, + 0, + 3, + SemanticTokenTypeName::Namespace, + SemanticTokenModifierName::DefaultLibrary.as_bitset() + ), + token( + 0, + 4, + 6, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::DefaultLibrary.as_bitset() + ), + token(0, 12, 1, SemanticTokenTypeName::Number, 0), + token(0, 15, 1, SemanticTokenTypeName::Number, 0), + token(0, 18, 1, SemanticTokenTypeName::Number, 0), + ] + ); + } + + #[test] + fn test_semantic_tokens_range_filters_lines() { + let code = "local x = 1\nlocal y = x + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens_range( + &doc, + Range { + start: lsp_types::Position { + line: 1, + character: 0, + }, + end: lsp_types::Position { + line: 1, + character: 100, + }, 
+ }, + ); + assert_eq!( + decode_absolute(&tokens), + vec![ + token(1, 0, 5, SemanticTokenTypeName::Keyword, 0), + token(1, 6, 1, SemanticTokenTypeName::Variable, 0), + token(1, 8, 1, SemanticTokenTypeName::Operator, 0), + token(1, 12, 1, SemanticTokenTypeName::Operator, 0), + token(1, 14, 1, SemanticTokenTypeName::Number, 0), + ] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs new file mode 100644 index 00000000..bd33de11 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs @@ -0,0 +1,130 @@ +use std::fmt::Write as _; + +use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; +use lsp_types::{SemanticTokenType, SemanticTokensLegend}; + +/// Semantic token types we support. +/// +/// The indices in this array must match the `SemanticTokenTypeName` enum values. +pub const TOKEN_TYPES: &[SemanticTokenType] = &[ + SemanticTokenType::NAMESPACE, // 0: std + SemanticTokenType::TYPE, // 1: (unused) + SemanticTokenType::CLASS, // 2: (unused) + SemanticTokenType::ENUM, // 3: (unused) + SemanticTokenType::INTERFACE, // 4: (unused) + SemanticTokenType::STRUCT, // 5: (unused) + SemanticTokenType::TYPE_PARAMETER, // 6: (unused) + SemanticTokenType::PARAMETER, // 7: function parameters + SemanticTokenType::VARIABLE, // 8: local variables + SemanticTokenType::PROPERTY, // 9: object fields + SemanticTokenType::ENUM_MEMBER, // 10: (unused) + SemanticTokenType::EVENT, // 11: (unused) + SemanticTokenType::FUNCTION, // 12: function definitions + SemanticTokenType::METHOD, // 13: object methods + SemanticTokenType::MACRO, // 14: (unused) + SemanticTokenType::KEYWORD, // 15: keywords + SemanticTokenType::MODIFIER, // 16: (unused) + SemanticTokenType::COMMENT, // 17: comments + SemanticTokenType::STRING, // 18: strings + SemanticTokenType::NUMBER, // 19: numbers + SemanticTokenType::REGEXP, // 20: (unused) + SemanticTokenType::OPERATOR, // 21: 
operators +]; + +/// Semantic token modifiers (bit flags). +pub const TOKEN_MODIFIERS: &[lsp_types::SemanticTokenModifier] = &[ + lsp_types::SemanticTokenModifier::DECLARATION, + lsp_types::SemanticTokenModifier::DEFINITION, + lsp_types::SemanticTokenModifier::READONLY, + lsp_types::SemanticTokenModifier::STATIC, + lsp_types::SemanticTokenModifier::DEPRECATED, + lsp_types::SemanticTokenModifier::ABSTRACT, + lsp_types::SemanticTokenModifier::ASYNC, + lsp_types::SemanticTokenModifier::MODIFICATION, + lsp_types::SemanticTokenModifier::DOCUMENTATION, + lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY, +]; + +/// Render a Markdown reference block for semantic token types and modifiers. +/// +/// This output is consumed by docs validation tests to keep docs in sync with +/// the actual semantic token legend. +#[must_use] +pub fn semantic_token_reference_markdown() -> String { + let mut markdown = String::new(); + markdown.push_str("#### Semantic Token Legend (Generated)\n\n"); + markdown.push_str("Token types (`index`: `lsp_name`, usage):\n"); + for (index, token_type) in TOKEN_TYPES.iter().enumerate() { + let used = SemanticTokenTypeName::from_repr(index as u32); + let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_str().to_owned()); + let _ = writeln!(markdown, "- `{index}`: `{}` ({usage})", token_type.as_str()); + } + markdown.push('\n'); + markdown.push_str("Token modifiers (`bit`: `lsp_name`, usage):\n"); + for (index, modifier) in TOKEN_MODIFIERS.iter().enumerate() { + let used = SemanticTokenModifierName::from_repr(index as u32); + let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_str().to_owned()); + let _ = writeln!( + markdown, + "- `1 << {index}`: `{}` ({usage})", + modifier.as_str() + ); + } + markdown +} + +/// Get the semantic tokens legend. 
+#[must_use] +pub fn legend() -> SemanticTokensLegend { + SemanticTokensLegend { + token_types: TOKEN_TYPES.to_vec(), + token_modifiers: TOKEN_MODIFIERS.to_vec(), + } +} + +#[cfg(test)] +mod tests { + use super::{legend, semantic_token_reference_markdown, TOKEN_MODIFIERS, TOKEN_TYPES}; + + #[test] + fn test_legend() { + let leg = legend(); + assert_eq!(leg.token_types, TOKEN_TYPES.to_vec()); + assert_eq!(leg.token_modifiers, TOKEN_MODIFIERS.to_vec()); + } + + #[test] + fn test_semantic_token_legend_docs_are_in_sync() { + let docs_path = + std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("../../docs/lsp/HANDLERS.md"); + let docs = std::fs::read_to_string(&docs_path).expect("read HANDLERS.md"); + let start_marker = ""; + let end_marker = ""; + let start_index = docs + .find(start_marker) + .expect("semantic token legend start marker should exist"); + let end_marker_index = docs + .find(end_marker) + .expect("semantic token legend end marker should exist"); + let end_index = end_marker_index + end_marker.len(); + let actual = docs[start_index..end_index].trim_end(); + let expected = format!( + "{start_marker}\n{}\n{end_marker}", + semantic_token_reference_markdown().trim_end() + ); + let normalized_actual = actual + .lines() + .map(str::trim_end) + .filter(|line| !line.is_empty()) + .collect::>(); + let normalized_expected = expected + .lines() + .map(str::trim_end) + .filter(|line| !line.is_empty()) + .collect::>(); + assert_eq!( + normalized_actual, normalized_expected, + "semantic token docs drifted; update docs/lsp/HANDLERS.md legend block" + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs new file mode 100644 index 00000000..f8ea8e0f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs @@ -0,0 +1,12 @@ +//! Semantic tokens handler. +//! +//! Provides semantic highlighting for Jsonnet code. 
+ +mod classification; +mod encode; +mod legend; +mod walk; + +pub use encode::{semantic_tokens, semantic_tokens_range}; +pub use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; +pub use legend::{legend, semantic_token_reference_markdown, TOKEN_MODIFIERS, TOKEN_TYPES}; diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs new file mode 100644 index 00000000..d769dd7c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs @@ -0,0 +1,117 @@ +use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; + +use super::{ + classification::{classify_definition_site, classify_variable_reference, is_stdlib_access}, + encode::SemanticTokenBuilder, + SemanticTokenModifierName, SemanticTokenTypeName, +}; + +pub(super) fn visit_token(builder: &mut SemanticTokenBuilder<'_>, token: &SyntaxToken) { + let kind = token.kind(); + + if kind.is_semantic_keyword_token() { + builder.add_token(token, SemanticTokenTypeName::Keyword, 0); + return; + } + if kind.is_semantic_comment_token() { + builder.add_token(token, SemanticTokenTypeName::Comment, 0); + return; + } + if kind.is_semantic_string_token() { + builder.add_token(token, SemanticTokenTypeName::String, 0); + return; + } + if kind.is_semantic_number_token() { + builder.add_token(token, SemanticTokenTypeName::Number, 0); + return; + } + if kind == SyntaxKind::IDENT { + // Identifiers need AST context for precise token type. + visit_identifier(builder, token); + return; + } + if kind.is_semantic_operator_token() { + builder.add_token(token, SemanticTokenTypeName::Operator, 0); + } +} + +fn visit_identifier(builder: &mut SemanticTokenBuilder<'_>, token: &SyntaxToken) { + let Some(parent) = token.parent() else { + return; + }; + + // Check if this is "std". 
+ if token.text() == "std" && ident_resolves_to_builtin_std(token) { + builder.add_token( + token, + SemanticTokenTypeName::Namespace, + SemanticTokenModifierName::DefaultLibrary.as_bitset(), + ); + return; + } + + // Check context based on parent/grandparent. + if parent.kind() == SyntaxKind::NAME { + if let Some(grandparent) = parent.parent() { + match grandparent.kind() { + // Variable reference. + SyntaxKind::EXPR_VAR => { + let token_type = classify_variable_reference(token); + builder.add_token(token, token_type, 0); + } + // Definition sites. + SyntaxKind::DESTRUCT_FULL => { + let (token_type, modifiers) = classify_definition_site(&grandparent); + builder.add_token(token, token_type, modifiers); + } + SyntaxKind::BIND_FUNCTION => { + builder.add_token( + token, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } + // Field access (std.xyz or obj.field). + SyntaxKind::EXPR_FIELD => { + if is_stdlib_access(&grandparent) { + builder.add_token( + token, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::DefaultLibrary.as_bitset(), + ); + } else { + builder.add_token(token, SemanticTokenTypeName::Property, 0); + } + } + _ => {} + } + } + } + + // Check for field name in object (ID node). 
+ if parent.kind() == SyntaxKind::NAME { + if let Some(grandparent) = parent.parent() { + if grandparent.kind() == SyntaxKind::FIELD_NAME_FIXED { + if let Some(great_grandparent) = grandparent.parent() { + if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_METHOD { + builder.add_token( + token, + SemanticTokenTypeName::Method, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } else if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_NORMAL { + builder.add_token( + token, + SemanticTokenTypeName::Property, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } + } + } + } + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/context.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/context.rs new file mode 100644 index 00000000..3e96333e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/context.rs @@ -0,0 +1,95 @@ +use jrsonnet_rowan_parser::{ + nodes::{Arg, ArgsDesc, ExprBase, ExprCall, ExprField}, + AstNode, SyntaxKind, SyntaxToken, +}; + +use super::model::{to_u32, ActiveArg}; + +pub(super) fn find_call_context( + token: &SyntaxToken, + cursor_offset: rowan::TextSize, +) -> Option<(String, ActiveArg)> { + let mut current = token.parent()?; + + loop { + if let Some(call) = ExprCall::cast(current.clone()) { + return extract_call_info(&call, cursor_offset); + } + + if current.kind() == SyntaxKind::ARGS_DESC { + if let Some(call) = current.parent().and_then(ExprCall::cast) { + return extract_call_info(&call, cursor_offset); + } + } + + current = current.parent()?; + } +} + +fn extract_call_info( + call: &ExprCall, + cursor_offset: rowan::TextSize, +) -> Option<(String, ActiveArg)> { + let func_name = extract_callee_name(call)?; + let active_arg = active_arg_for_call(call, cursor_offset); + Some((func_name, active_arg)) +} + +fn extract_callee_name(call: &ExprCall) -> Option<String> { + let callee =
call.callee()?; + match callee.expr_base()? { + ExprBase::ExprVar(var) => Some(var.name()?.ident_lit()?.text().to_string()), + ExprBase::ExprField(field) => extract_field_name(&field), + _ => None, + } +} + +fn extract_field_name(field: &ExprField) -> Option<String> { + Some(field.field()?.ident_lit()?.text().to_string()) +} + +fn active_arg_for_call(call: &ExprCall, cursor_offset: rowan::TextSize) -> ActiveArg { + let Some(args_desc) = call.args_desc() else { + return ActiveArg::default(); + }; + let positional_index = positional_arg_index(&args_desc, cursor_offset); + let named_arg = args_desc + .args() + .nth(usize::try_from(positional_index).unwrap_or(usize::MAX)) + .and_then(|arg| arg_name(&arg)); + + ActiveArg { + positional_index, + named_arg, + } +} + +fn positional_arg_index(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { + let args: Vec<_> = args_desc.args().collect(); + if args.is_empty() { + return count_preceding_commas(args_desc, cursor_offset); + } + + for (index, arg) in args.iter().enumerate() { + if cursor_offset <= arg.syntax().text_range().end() { + return to_u32(index); + } + } + + count_preceding_commas(args_desc, cursor_offset) +} + +fn arg_name(arg: &Arg) -> Option<String> { + Some(arg.name()?.ident_lit()?.text().to_string()) +} + +fn count_preceding_commas(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { + to_u32( + args_desc + .syntax() + .children_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .filter(|t| t.kind() == SyntaxKind::COMMA && t.text_range().end() <= cursor_offset) + .count(), + ) +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/handler.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/handler.rs new file mode 100644 index 00000000..041e8a22 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/handler.rs @@ -0,0 +1,153 @@ +use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_rowan_parser::{AstNode, SyntaxNode, SyntaxToken}; + +use
super::{context::find_call_context, render::get_signature_for_function}; + +#[must_use] +pub fn signature_help( + document: &Document, + position: LspPosition, +) -> Option<SignatureHelp> { + let text = document.text(); + let line_index = document.line_index(); + + let offset = line_index.offset(position, text)?; + let cursor_offset: rowan::TextSize = offset.into(); + + let ast = document.ast(); + let root = ast.syntax(); + + let token = token_at_offset(root, offset.into())?; + let (func_name, active_arg) = find_call_context(&token, cursor_offset)?; + + get_signature_for_function(&func_name, &token, &active_arg) +} + +fn token_at_offset(root: &SyntaxNode, offset: u32) -> Option<SyntaxToken> { + root.token_at_offset(rowan::TextSize::from(offset)) + .right_biased() +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + use lsp_types::{ + Documentation, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel, + SignatureHelp, SignatureInformation, + }; + + use super::{super::model::to_u32, *}; + + fn document_with_cursor(code_with_cursor: &str) -> (Document, LspPosition) { + let cursor = code_with_cursor + .find('|') + .expect("test source should include `|` cursor marker"); + let mut source = code_with_cursor.to_string(); + source.remove(cursor); + + let before = &code_with_cursor[..cursor]; + let line = to_u32(before.bytes().filter(|&b| b == b'\n').count()); + let column = to_u32( + before + .rsplit_once('\n') + .map_or(before.len(), |(_, suffix)| suffix.len()), + ); + + ( + Document::new(source, DocVersion::new(1)), + (line, column).into(), + ) + } + + #[test] + fn test_stdlib_signature_help() { + let code = "std.filter(|"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 11).into(); + + let help = signature_help(&doc, pos); + assert_eq!( + help, + Some(SignatureHelp { + signatures: vec![SignatureInformation { + label: "std.filter(func, arr)".to_string(), + documentation: Some(Documentation::MarkupContent(MarkupContent { + kind:
MarkupKind::Markdown, + value: "Returns elements of `arr` where `func(x)` is true.".to_string(), + })), + parameters: Some(vec![ + ParameterInformation { + label: ParameterLabel::LabelOffsets([11, 15]), + documentation: None, + }, + ParameterInformation { + label: ParameterLabel::LabelOffsets([17, 20]), + documentation: None, + }, + ]), + active_parameter: Some(0), + }], + active_signature: Some(0), + active_parameter: Some(0), + }) + ); + } + + #[test] + fn test_local_function_signature_help() { + let code = r"local add(a, b) = a + b; add(1|"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 30).into(); + + let help = signature_help(&doc, pos); + assert_eq!( + help, + Some(SignatureHelp { + signatures: vec![SignatureInformation { + label: "add(a, b)".to_string(), + documentation: None, + parameters: Some(vec![ + ParameterInformation { + label: ParameterLabel::LabelOffsets([4, 5]), + documentation: None, + }, + ParameterInformation { + label: ParameterLabel::LabelOffsets([7, 8]), + documentation: None, + }, + ]), + active_parameter: Some(0), + }], + active_signature: Some(0), + active_parameter: Some(0), + }) + ); + } + + #[test] + fn test_no_signature_help_outside_call() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 13).into(); + + let help = signature_help(&doc, pos); + assert_eq!(help, None); + } + + #[test] + fn test_stdlib_named_argument_active_parameter() { + let (doc, pos) = document_with_cursor(r#"std.substr(str="abc", from=1|, len=1)"#); + let help = signature_help(&doc, pos).expect("signature help should be available"); + assert_eq!(help.active_parameter, Some(1)); + assert_eq!(help.signatures[0].active_parameter, Some(1)); + } + + #[test] + fn test_local_named_argument_active_parameter() { + let (doc, pos) = + document_with_cursor(r"local add(a, b, c) = a + b + c; add(c=3, a=1, b=2|)"); + let help = signature_help(&doc, pos).expect("signature help should 
be available"); + assert_eq!(help.active_parameter, Some(1)); + assert_eq!(help.signatures[0].active_parameter, Some(1)); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/local.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/local.rs new file mode 100644 index 00000000..63193b34 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/local.rs @@ -0,0 +1,130 @@ +use jrsonnet_rowan_parser::{ + nodes::{Bind, BindFunction, Destruct, ExprBase, Param, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; + +use super::model::{LocalFunctionSignature, SignatureParamInfo}; + +pub(super) fn find_local_function_signature( + token: &SyntaxToken, + name: &str, +) -> Option<LocalFunctionSignature> { + let mut current = token.parent()?; + + while let Some(parent) = current.parent() { + if let Some(sig) = check_scope_for_function(&parent, &current, name) { + return Some(sig); + } + current = parent; + } + + None +} + +fn check_scope_for_function( + scope: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option<LocalFunctionSignature> { + match scope.kind() { + SyntaxKind::EXPR => check_expr_for_function(scope, child, name), + _ => None, + } +} + +fn check_expr_for_function( + expr: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option<LocalFunctionSignature> { + for stmt_node in expr.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + if stmt_node.text_range().end() > child.text_range().start() { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let Some(sig) = check_bind_for_function(&bind, name) { + return Some(sig); + } + } + } + } + } + None +} + +fn check_bind_for_function(bind: &Bind, name: &str) -> Option<LocalFunctionSignature> { + match bind { + Bind::BindDestruct(bd) => { + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != name { + return None; + } + + let value = bd.value()?; + if let Some(base) = value.expr_base() {
+ if let ExprBase::ExprFunction(func) = base { + return extract_params_from_function_expr(&func); + } + } + } + None + } + Bind::BindFunction(bf) => { + let bind_name = bf.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != name { + return None; + } + + extract_params_from_bind_function(bf) + } + } +} + +fn extract_params_from_bind_function(func: &BindFunction) -> Option<LocalFunctionSignature> { + let params_desc = func.params()?; + let params: Vec<SignatureParamInfo> = params_desc + .params() + .filter_map(|p| extract_param_info(&p)) + .collect(); + + Some(LocalFunctionSignature { params }) +} + +fn extract_params_from_function_expr( + func: &jrsonnet_rowan_parser::nodes::ExprFunction, +) -> Option<LocalFunctionSignature> { + let params_desc = func.params_desc()?; + let params: Vec<SignatureParamInfo> = params_desc + .params() + .filter_map(|p| extract_param_info(&p)) + .collect(); + + Some(LocalFunctionSignature { params }) +} + +fn extract_param_info(param: &Param) -> Option<SignatureParamInfo> { + let destruct = param.destruct()?; + let name = match destruct { + Destruct::DestructFull(full) => { + let name = full.name()?; + name.ident_lit()?.text().to_string() + } + Destruct::DestructArray(_) => "[array]".to_string(), + Destruct::DestructObject(_) => "{object}".to_string(), + Destruct::DestructSkip(_) => return None, + }; + + let label = if param.assign_token().is_some() { + format!("{name}=...") + } else { + name.clone() + }; + Some(SignatureParamInfo { label, name }) +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/mod.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/mod.rs new file mode 100644 index 00000000..238494e2 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/mod.rs @@ -0,0 +1,11 @@ +//! Signature help handler. +//! +//! Provides parameter information when the user is inside a function call.
+ +mod context; +mod handler; +mod local; +mod model; +mod render; + +pub use handler::signature_help; diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/model.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/model.rs new file mode 100644 index 00000000..721a9f4b --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/model.rs @@ -0,0 +1,19 @@ +#[derive(Debug, Default, Clone)] +pub(super) struct ActiveArg { + pub(super) positional_index: u32, + pub(super) named_arg: Option<String>, +} + +#[derive(Debug, Clone)] +pub(super) struct SignatureParamInfo { + pub(super) label: String, + pub(super) name: String, +} + +pub(super) struct LocalFunctionSignature { + pub(super) params: Vec<SignatureParamInfo>, +} + +pub(super) fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/render.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/render.rs new file mode 100644 index 00000000..d35ac1cc --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/render.rs @@ -0,0 +1,168 @@ +use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_rowan_parser::SyntaxToken; +use lsp_types::{ + Documentation, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel, SignatureHelp, + SignatureInformation, +}; + +use super::{ + local::find_local_function_signature, + model::{to_u32, ActiveArg, SignatureParamInfo}, +}; + +pub(super) fn get_signature_for_function( + name: &str, + token: &SyntaxToken, + active_arg: &ActiveArg, +) -> Option<SignatureHelp> { + if let Some(doc) = stdlib::get_stdlib_doc(name) { + let (params_info, variadic) = stdlib_params(name, &doc.signature); + let active_param = resolve_active_parameter(&params_info, variadic, active_arg); + let signature_name = format!("std.{name}"); + let (label, param_offsets) = signature_label_with_offsets(&signature_name, &params_info); + let params = to_lsp_params(&param_offsets); + + return Some(SignatureHelp { + signatures: vec![SignatureInformation { + label, +
documentation: Some(Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: doc.description.to_string(), + })), + parameters: Some(params), + active_parameter: Some(active_param), + }], + active_signature: Some(0), + active_parameter: Some(active_param), + }); + } + + if let Some(sig) = find_local_function_signature(token, name) { + let active_param = resolve_active_parameter(&sig.params, false, active_arg); + let (label, param_offsets) = signature_label_with_offsets(name, &sig.params); + let params = to_lsp_params(&param_offsets); + + return Some(SignatureHelp { + signatures: vec![SignatureInformation { + label, + documentation: None, + parameters: Some(params), + active_parameter: Some(active_param), + }], + active_signature: Some(0), + active_parameter: Some(active_param), + }); + } + + None +} + +fn signature_label_with_offsets( + name: &str, + params: &[SignatureParamInfo], +) -> (String, Vec<[u32; 2]>) { + let mut label = String::new(); + let mut offsets = Vec::with_capacity(params.len()); + label.push_str(name); + label.push('('); + + for (index, param) in params.iter().enumerate() { + if index > 0 { + label.push_str(", "); + } + let start = to_u32(label.len()); + label.push_str(&param.label); + let end = to_u32(label.len()); + offsets.push([start, end]); + } + + label.push(')'); + (label, offsets) +} + +fn to_lsp_params(offsets: &[[u32; 2]]) -> Vec<ParameterInformation> { + offsets + .iter() + .map(|offset| ParameterInformation { + label: ParameterLabel::LabelOffsets(*offset), + documentation: None, + }) + .collect() +} + +fn resolve_active_parameter( + params: &[SignatureParamInfo], + _variadic: bool, + active_arg: &ActiveArg, +) -> u32 { + if params.is_empty() { + return active_arg.positional_index; + } + + if let Some(named_arg) = active_arg.named_arg.as_deref() { + if let Some(index) = params.iter().position(|param| param.name == named_arg) { + return to_u32(index); + } + } + + let max_index = to_u32(params.len().saturating_sub(1)); +
active_arg.positional_index.min(max_index) +} + +fn stdlib_params(name: &str, fallback_signature: &str) -> (Vec<SignatureParamInfo>, bool) { + if let Some(func_data) = stdlib::get_stdlib_func_data(name) { + let params = func_data + .params + .into_iter() + .map(|param| { + let label = if param.has_default { + format!("{}=...", param.name) + } else { + param.name.clone() + }; + SignatureParamInfo { + label, + name: param.name, + } + }) + .collect(); + return (params, func_data.variadic); + } + + let params = parse_signature_params(fallback_signature) + .into_iter() + .map(|label| { + let name = label + .split_once('=') + .map_or_else(|| label.clone(), |(name, _)| name.to_string()); + SignatureParamInfo { label, name } + }) + .collect(); + (params, false) +} + +fn parse_signature_params(signature: &str) -> Vec<String> { + let s = signature.trim_start_matches('('); + s.split(',') + .map(|p| p.trim().to_string()) + .filter(|p| !p.is_empty()) + .collect() +} + +#[cfg(test)] +mod tests { + use super::parse_signature_params; + + #[test] + fn test_parse_signature_params() { + assert_eq!( + parse_signature_params("(func, arr"), + vec!["func".to_string(), "arr".to_string()] + ); + assert_eq!(parse_signature_params("(x"), vec!["x".to_string()]); + assert_eq!( + parse_signature_params("(a, b, c"), + vec!["a".to_string(), "b".to_string(), "c".to_string()] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/symbols/extract.rs b/crates/jrsonnet-lsp-handlers/src/symbols/extract.rs new file mode 100644 index 00000000..dc7e3f5c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/symbols/extract.rs @@ -0,0 +1,340 @@ +use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindDestruct, BindFunction, ExprBase, ExprObject, Member, MemberBindStmt, + MemberFieldMethod, MemberFieldNormal, ObjBody, ObjBodyMemberList, ObjLocal, Stmt, + StmtLocal, + }, + AstNode, +}; +use lsp_types::{DocumentSymbol, SymbolKind}; + +use super::names::{build_document_symbol,
create_symbol, get_destruct_name, get_field_name}; + +#[must_use] +pub fn document_symbols(document: &Document) -> Vec<DocumentSymbol> { + let ast = document.ast(); + let text = document.text(); + let line_index = document.line_index(); + + let mut symbols = Vec::new(); + + if let Some(expr) = ast.expr() { + for stmt in expr.stmts() { + if let Some(sym) = process_stmt(&stmt, text, line_index) { + symbols.push(sym); + } + } + + if let Some(base) = expr.expr_base() { + symbols.extend(process_expr_base(&base, text, line_index)); + } + } + + symbols +} + +fn process_stmt(stmt: &Stmt, text: &str, line_index: &LineIndex) -> Option<DocumentSymbol> { + match stmt { + Stmt::StmtLocal(local) => process_local_stmt(local, text, line_index), + Stmt::StmtAssert(_) => None, + } +} + +fn process_local_stmt( + local: &StmtLocal, + text: &str, + line_index: &LineIndex, +) -> Option<DocumentSymbol> { + let binds: Vec<_> = local.binds().collect(); + + if binds.len() == 1 { + binds + .first() + .and_then(|bind| process_bind(bind, text, line_index)) + } else if !binds.is_empty() { + let range = local.syntax().text_range(); + let children: Vec<_> = binds + .iter() + .filter_map(|b| process_bind(b, text, line_index)) + .collect(); + + if children.is_empty() { + return None; + } + + create_symbol( + "local".to_string(), + SymbolKind::NAMESPACE, + range, + range, + line_index, + text, + Some(children), + ) + } else { + None + } +} + +fn process_bind(bind: &Bind, text: &str, line_index: &LineIndex) -> Option<DocumentSymbol> { + match bind { + Bind::BindDestruct(bd) => process_bind_destruct(bd, text, line_index), + Bind::BindFunction(bf) => process_bind_function(bf, text, line_index), + } +} + +fn process_bind_destruct( + bind: &BindDestruct, + text: &str, + line_index: &LineIndex, +) -> Option<DocumentSymbol> { + let destruct = bind.into()?; + let name = get_destruct_name(&destruct)?; + let range = bind.syntax().text_range(); + + let (kind, children) = bind + .value() + .map_or((SymbolKind::VARIABLE, None), |value_expr| { + value_expr + .expr_base() +
.map_or((SymbolKind::VARIABLE, None), |base| match &base { + ExprBase::ExprFunction(_) => (SymbolKind::FUNCTION, None), + ExprBase::ExprObject(obj) => { + let children = process_object(obj, text, line_index); + ( + SymbolKind::OBJECT, + if children.is_empty() { + None + } else { + Some(children) + }, + ) + } + _ => (SymbolKind::VARIABLE, None), + }) + }); + + create_symbol(name, kind, range, range, line_index, text, children) +} + +fn process_bind_function( + bind: &BindFunction, + text: &str, + line_index: &LineIndex, +) -> Option<DocumentSymbol> { + let name = bind.name()?.ident_lit()?.text().to_string(); + let range = bind.syntax().text_range(); + + let detail = bind.params().map(|params| { + let param_names: Vec<_> = params + .params() + .filter_map(|p| p.destruct().and_then(|d| get_destruct_name(&d))) + .collect(); + format!("({})", param_names.join(", ")) + }); + + build_document_symbol( + name, + detail, + SymbolKind::FUNCTION, + to_lsp_range(range, line_index, text), + to_lsp_range(range, line_index, text), + None, + ) +} + +fn process_expr_base(base: &ExprBase, text: &str, line_index: &LineIndex) -> Vec<DocumentSymbol> { + match base { + ExprBase::ExprObject(obj) => process_object(obj, text, line_index), + _ => Vec::new(), + } +} + +fn process_object(obj: &ExprObject, text: &str, line_index: &LineIndex) -> Vec<DocumentSymbol> { + let Some(body) = obj.obj_body() else { + return Vec::new(); + }; + + match body { + ObjBody::ObjBodyMemberList(list) => process_member_list(&list, text, line_index), + ObjBody::ObjBodyComp(_) => Vec::new(), + } +} + +fn process_member_list( + list: &ObjBodyMemberList, + text: &str, + line_index: &LineIndex, +) -> Vec<DocumentSymbol> { + let mut symbols = Vec::new(); + + for member in list.members() { + if let Some(sym) = process_member(&member, text, line_index) { + symbols.push(sym); + } + } + + symbols +} + +fn process_member(member: &Member, text: &str, line_index: &LineIndex) -> Option<DocumentSymbol> { + match member { + Member::MemberBindStmt(bind_stmt) => process_member_bind(bind_stmt, text,
line_index), + Member::MemberFieldNormal(field) => process_field_normal(field, text, line_index), + Member::MemberFieldMethod(method) => process_field_method(method, text, line_index), + Member::MemberAssertStmt(_) => None, + } +} + +fn process_member_bind( + bind_stmt: &MemberBindStmt, + text: &str, + line_index: &LineIndex, +) -> Option<DocumentSymbol> { + let obj_local = bind_stmt.obj_local()?; + process_obj_local(&obj_local, text, line_index) +} + +fn process_obj_local( + obj_local: &ObjLocal, + text: &str, + line_index: &LineIndex, +) -> Option<DocumentSymbol> { + let bind = obj_local.bind()?; + process_bind(&bind, text, line_index) +} + +fn process_field_normal( + field: &MemberFieldNormal, + text: &str, + line_index: &LineIndex, +) -> Option<DocumentSymbol> { + let name = get_field_name(&field.field_name()?)?; + let range = field.syntax().text_range(); + + let children = field.expr().and_then(|expr| { + if let Some(ExprBase::ExprObject(obj)) = expr.expr_base() { + let children = process_object(&obj, text, line_index); + if children.is_empty() { + None + } else { + Some(children) + } + } else { + None + } + }); + + let kind = field.expr().map_or(SymbolKind::FIELD, |expr| { + expr.expr_base() + .map_or(SymbolKind::FIELD, |base| match base { + ExprBase::ExprFunction(_) => SymbolKind::FUNCTION, + ExprBase::ExprObject(_) => SymbolKind::OBJECT, + ExprBase::ExprArray(_) => SymbolKind::ARRAY, + _ => SymbolKind::FIELD, + }) + }); + + create_symbol(name, kind, range, range, line_index, text, children) +} + +fn process_field_method( + method: &MemberFieldMethod, + text: &str, + line_index: &LineIndex, +) -> Option<DocumentSymbol> { + let name = get_field_name(&method.field_name()?)?; + let range = method.syntax().text_range(); + + let detail = method.params_desc().map(|params| { + let param_names: Vec<_> = params + .params() + .filter_map(|p| p.destruct().and_then(|d| get_destruct_name(&d))) + .collect(); + format!("({})", param_names.join(", ")) + }); + + build_document_symbol( + name, + detail, + SymbolKind::METHOD, +
to_lsp_range(range, line_index, text), + to_lsp_range(range, line_index, text), + None, + ) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + #[test] + fn test_empty_object() { + let doc = Document::new("{}".to_string(), DocVersion::new(1)); + let symbols = document_symbols(&doc); + assert!(symbols.is_empty()); + } + + #[test] + fn test_object_with_fields() { + let doc = Document::new( + r#"{ name: "test", value: 42, nested: { inner: true } }"#.to_string(), + DocVersion::new(1), + ); + let symbols = document_symbols(&doc); + + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["name", "value", "nested"]); + + let nested_children = symbols[2] + .children + .as_ref() + .expect("nested should have children"); + let nested_names: Vec<_> = nested_children.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(nested_names, vec!["inner"]); + } + + #[test] + fn test_local_bindings() { + let doc = Document::new( + r"local x = 1; local y = 2; { a: x, b: y }".to_string(), + DocVersion::new(1), + ); + let symbols = document_symbols(&doc); + + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["x", "y", "a", "b"]); + } + + #[test] + fn test_function_binding() { + let doc = Document::new( + r"local add(a, b) = a + b; { result: add(1, 2) }".to_string(), + DocVersion::new(1), + ); + let symbols = document_symbols(&doc); + + let symbol_info: Vec<(&str, SymbolKind)> = + symbols.iter().map(|s| (s.name.as_str(), s.kind)).collect(); + assert_eq!( + symbol_info, + vec![("add", SymbolKind::FUNCTION), ("result", SymbolKind::FIELD)] + ); + } + + #[test] + fn test_method_field() { + let doc = Document::new( + r#"{ greet(name): "Hello, " + name }"#.to_string(), + DocVersion::new(1), + ); + let symbols = document_symbols(&doc); + + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["greet"]); + 
assert_eq!(symbols[0].kind, SymbolKind::METHOD); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/symbols/mod.rs b/crates/jrsonnet-lsp-handlers/src/symbols/mod.rs new file mode 100644 index 00000000..daba2776 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/symbols/mod.rs @@ -0,0 +1,11 @@ +//! Document and workspace symbol handlers. +//! +//! Extracts symbols from Jsonnet AST nodes and exposes both tree-style +//! document symbols and flat workspace symbol views. + +mod extract; +mod names; +mod workspace; + +pub use extract::document_symbols; +pub use workspace::workspace_symbols_for_document; diff --git a/crates/jrsonnet-lsp-handlers/src/symbols/names.rs b/crates/jrsonnet-lsp-handlers/src/symbols/names.rs new file mode 100644 index 00000000..7341b730 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/symbols/names.rs @@ -0,0 +1,75 @@ +use jrsonnet_lsp_document::{to_lsp_range, LineIndex}; +use jrsonnet_rowan_parser::{ + nodes::{FieldName, FieldNameFixed}, + AstToken, +}; +use lsp_types::{DocumentSymbol, SymbolKind}; +use rowan::TextRange; + +pub(super) fn get_field_name(field_name: &FieldName) -> Option<String> { + match field_name { + FieldName::FieldNameFixed(fixed) => get_fixed_field_name(fixed), + FieldName::FieldNameDynamic(_) => Some("[computed]".to_string()), + } +} + +fn get_fixed_field_name(fixed: &FieldNameFixed) -> Option<String> { + if let Some(name) = fixed.id() { + Some(name.ident_lit()?.text().to_string()) + } else { + fixed + .text() + .map(|text| text.text().trim_matches('"').trim_matches('\'').to_string()) + } +} + +pub(super) fn get_destruct_name( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, +) -> Option<String> { + use jrsonnet_rowan_parser::nodes::Destruct; + match destruct { + Destruct::DestructFull(full) => Some(full.name()?.ident_lit()?.text().to_string()), + Destruct::DestructSkip(_) => None, + Destruct::DestructArray(_) => Some("[array]".to_string()), + Destruct::DestructObject(_) => Some("{object}".to_string()), + } +} + +pub(super) fn
create_symbol( + name: String, + kind: SymbolKind, + range: TextRange, + selection_range: TextRange, + line_index: &LineIndex, + text: &str, + children: Option<Vec<DocumentSymbol>>, +) -> Option<DocumentSymbol> { + build_document_symbol( + name, + None, + kind, + to_lsp_range(range, line_index, text), + to_lsp_range(selection_range, line_index, text), + children, + ) +} + +pub(super) fn build_document_symbol( + name: String, + detail: Option<String>, + kind: SymbolKind, + range: lsp_types::Range, + selection_range: lsp_types::Range, + children: Option<Vec<DocumentSymbol>>, +) -> Option<DocumentSymbol> { + serde_json::from_value(serde_json::json!({ + "name": name, + "detail": detail, + "kind": kind, + "tags": Option::<Vec<lsp_types::SymbolTag>>::None, + "range": range, + "selectionRange": selection_range, + "children": children, + })) + .ok() +} diff --git a/crates/jrsonnet-lsp-handlers/src/symbols/workspace.rs b/crates/jrsonnet-lsp-handlers/src/symbols/workspace.rs new file mode 100644 index 00000000..b3f59563 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/symbols/workspace.rs @@ -0,0 +1,129 @@ +use jrsonnet_lsp_document::Document; +use lsp_types::{Location, SymbolInformation, Uri}; + +use super::extract::document_symbols; + +fn build_symbol_information( + name: String, + kind: lsp_types::SymbolKind, + tags: Option<Vec<lsp_types::SymbolTag>>, + location: Location, + container_name: Option<String>, +) -> Option<SymbolInformation> { + serde_json::from_value(serde_json::json!({ + "name": name, + "kind": kind, + "tags": tags, + "location": location, + "containerName": container_name, + })) + .ok() +} + +pub fn workspace_symbols_for_document( + document: &Document, + uri: &Uri, + query: &str, +) -> Vec<SymbolInformation> { + let doc_symbols = document_symbols(document); + let mut results = Vec::new(); + + flatten_symbols(&doc_symbols, uri, query, None, &mut results); + + results +} + +fn flatten_symbols( + symbols: &[lsp_types::DocumentSymbol], + uri: &Uri, + query: &str, + container_name: Option<&str>, + results: &mut Vec<SymbolInformation>, +) { + let query_lower = query.to_lowercase(); + + for symbol in symbols { + let matches = query.is_empty() ||
symbol.name.to_lowercase().contains(&query_lower); + + if matches { + let location = Location { + uri: uri.clone(), + range: symbol.range, + }; + let symbol_info = build_symbol_information( + symbol.name.clone(), + symbol.kind, + symbol.tags.clone(), + location, + container_name.map(String::from), + ); + if let Some(symbol_info) = symbol_info { + results.push(symbol_info); + } + } + + if let Some(children) = &symbol.children { + flatten_symbols(children, uri, query, Some(&symbol.name), results); + } + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + #[test] + fn test_workspace_symbols_empty_query() { + let doc = Document::new( + r#"local x = 1; { name: "test", value: x }"#.to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, ""); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["x", "name", "value"]); + } + + #[test] + fn test_workspace_symbols_with_query() { + let doc = Document::new( + r"{ myField: 1, otherField: 2, myMethod(x): x }".to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "my"); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["myField", "myMethod"]); + } + + #[test] + fn test_workspace_symbols_case_insensitive() { + let doc = Document::new( + r"{ MyField: 1, myfield: 2, MYFIELD: 3 }".to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "myfield"); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["MyField", "myfield", "MYFIELD"]); + } + + #[test] + fn test_workspace_symbols_nested() { + let doc = Document::new( + r"{ outer: { innerField: 1 } 
}".to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "inner"); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["innerField"]); + assert_eq!(symbols[0].container_name, Some("outer".to_string())); + } +} diff --git a/crates/jrsonnet-lsp-import/Cargo.toml b/crates/jrsonnet-lsp-import/Cargo.toml new file mode 100644 index 00000000..4f3e82bd --- /dev/null +++ b/crates/jrsonnet-lsp-import/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "jrsonnet-lsp-import" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Import graph and work queue for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +rayon = "1.11.0" +rowan.workspace = true +rustc-hash.workspace = true + +[dev-dependencies] +criterion = { version = "0.5", features = ["html_reports"] } +tempfile.workspace = true + +[[bench]] +name = "import_graph" +harness = false + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-import/benches/import_graph.rs b/crates/jrsonnet-lsp-import/benches/import_graph.rs new file mode 100644 index 00000000..ffe9b6f4 --- /dev/null +++ b/crates/jrsonnet-lsp-import/benches/import_graph.rs @@ -0,0 +1,156 @@ +//! Benchmarks for import-graph operations. 
+ +use std::path::PathBuf; + +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; +use jrsonnet_lsp_document::{CanonicalPath, PathStore}; +use jrsonnet_lsp_import::{ImportEntry, ImportGraph, ImportKind}; + +fn bench_path(index: usize) -> CanonicalPath { + CanonicalPath::new(PathBuf::from(format!("/bench/file-{index}.jsonnet"))) +} + +fn bench_update_file_with_entries(c: &mut Criterion) { + let mut group = c.benchmark_group("import_graph/update_file_with_entries"); + + for dep_count in [1usize, 8, 32, 128] { + let mut graph = ImportGraph::new(PathStore::new()); + let main_path = bench_path(0); + let main_file = graph.intern(&main_path); + + let mut entries_template = Vec::with_capacity(dep_count); + for dep_idx in 0..dep_count { + let dep_path = bench_path(dep_idx + 1); + let dep_file = graph.intern(&dep_path); + entries_template.push(ImportEntry { + kind: ImportKind::Code, + binding_name: None, + import_path: format!("dep-{dep_idx}.jsonnet"), + resolved_file: Some(dep_file), + resolved_path: Some(dep_path), + }); + } + + group.throughput(Throughput::Elements(dep_count as u64)); + group.bench_function(BenchmarkId::new("deps", dep_count), |b| { + b.iter_batched( + || entries_template.clone(), + |entries| { + graph.update_file_with_entries(main_file, black_box(entries)); + }, + criterion::BatchSize::SmallInput, + ); + }); + } + + group.finish(); +} + +fn build_chain_graph( + size: usize, +) -> ( + ImportGraph, + Vec, + Vec, +) { + let mut graph = ImportGraph::new(PathStore::new()); + let mut paths = Vec::with_capacity(size); + let mut files = Vec::with_capacity(size); + + for idx in 0..size { + let path = bench_path(idx); + let file = graph.intern(&path); + paths.push(path); + files.push(file); + } + + for idx in 0..size.saturating_sub(1) { + graph.update_file_with_entries( + files[idx], + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: None, + import_path: format!("file-{}.jsonnet", idx + 1), + resolved_file: 
Some(files[idx + 1]), + resolved_path: Some(paths[idx + 1].clone()), + }], + ); + } + + (graph, paths, files) +} + +fn bench_transitive_importers(c: &mut Criterion) { + let mut group = c.benchmark_group("import_graph/transitive_importers"); + + for size in [64usize, 512, 2048] { + let (graph, _, files) = build_chain_graph(size); + let leaf = files[size - 1]; + + group.throughput(Throughput::Elements(size as u64)); + group.bench_function(BenchmarkId::new("chain", size), |b| { + b.iter(|| black_box(graph.transitive_importers(leaf))); + }); + } + + group.finish(); +} + +fn build_bulk_fixture( + file_count: usize, + fanout: usize, +) -> Vec<(CanonicalPath, Vec)> { + let paths: Vec<_> = (0..file_count).map(bench_path).collect(); + + let mut fixture = Vec::with_capacity(file_count); + for (idx, path) in paths.iter().enumerate() { + let mut deps = Vec::with_capacity(fanout); + for dep_path in paths.iter().skip(idx + 1).take(fanout) { + deps.push(dep_path.clone()); + } + fixture.push((path.clone(), deps)); + } + + fixture +} + +fn bench_bulk_update(c: &mut Criterion) { + let mut group = c.benchmark_group("import_graph/bulk_update"); + + for file_count in [128usize, 1024] { + let fanout = 3usize; + let fixture = build_bulk_fixture(file_count, fanout); + + group.throughput(Throughput::Elements(file_count as u64)); + group.bench_function(BenchmarkId::new("files", file_count), |b| { + b.iter(|| { + let mut graph = ImportGraph::new(PathStore::new()); + for (path, deps) in &fixture { + let file = graph.intern(path); + let entries: Vec<_> = deps + .iter() + .map(|dep_path| ImportEntry { + kind: ImportKind::Code, + binding_name: None, + import_path: dep_path.as_path().to_string_lossy().into_owned(), + resolved_file: None, + resolved_path: Some(dep_path.clone()), + }) + .collect(); + graph.update_file_with_entries(file, entries); + } + black_box(graph.file_count()); + }); + }); + } + + group.finish(); +} + +criterion_group!( + benches, + bench_update_file_with_entries, + 
bench_transitive_importers, + bench_bulk_update +); +criterion_main!(benches); diff --git a/crates/jrsonnet-lsp-import/src/graph/mod.rs b/crates/jrsonnet-lsp-import/src/graph/mod.rs new file mode 100644 index 00000000..18c8888e --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/mod.rs @@ -0,0 +1,9 @@ +mod operations; +mod parse; +mod traversal; + +pub use operations::{ + parse_document_import_occurrences, parse_document_import_occurrences_approximate, + parse_document_imports, parse_document_imports_approximate, ImportEntry, ImportGraph, + ImportKind, ImportOccurrence, ImportParseMode, +}; diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs new file mode 100644 index 00000000..6dc6388f --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -0,0 +1,392 @@ +//! Import graph for tracking file dependencies. +//! +//! Maintains a bidirectional graph of import relationships between files, +//! enabling efficient cross-file reference lookups. + +use std::{collections::VecDeque, sync::Arc}; + +use jrsonnet_lsp_document::{CanonicalPath, Document, FileId, PathResolver, PathStore}; +use rustc_hash::{FxHashMap, FxHashSet}; + +pub use super::parse::{ + parse_document_import_occurrences, parse_document_import_occurrences_approximate, + parse_document_imports, parse_document_imports_approximate, +}; + +/// Information about an import in a file. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ImportEntry { + /// Import flavor (`import`, `importstr`, `importbin`). + pub kind: ImportKind, + /// The binding name if this import is bound to a variable. + /// e.g., "lib" in `local lib = import "lib.jsonnet"` + pub binding_name: Option, + /// The raw import path as written in the source. + pub import_path: String, + /// Interned file id of the resolved target, when known. + pub resolved_file: Option, + /// The resolved canonical path of the imported file. 
+ pub resolved_path: Option, +} + +/// Jsonnet import flavor. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ImportKind { + Code, + String, + Binary, +} + +/// One import occurrence in source, including its location. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ImportOccurrence { + /// Semantic import entry data. + pub entry: ImportEntry, + /// Range of the import path token in source (e.g. `"foo.libsonnet"`). + pub import_range: rowan::TextRange, +} + +/// How import entries for a file were produced. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ImportParseMode { + /// Entries come from the full AST path. + Precise, + /// Entries come from a cheaper token-only approximation. + Approximate, +} + +/// Import graph tracking dependencies between files. +/// +/// This structure maintains two maps: +/// - `imports`: file → list of files it imports +/// - `imported_by`: file → list of files that import it (reverse index) +#[derive(Debug)] +pub struct ImportGraph { + /// Interned mapping between canonical paths and stable file ids. + pub(super) paths: PathStore, + /// Read-only resolver over interned mapping. + pub(super) resolver: PathResolver, + /// Map of file → import entries in that file. + pub(super) imports: FxHashMap>, + /// Cache of resolved imports by raw import path (`import`/`importstr`/`importbin`). + pub(super) resolved_imports: FxHashMap>>, + /// Cache of resolved code imports by raw import path (`import` only). + pub(super) resolved_code_imports: FxHashMap>>, + /// Reverse index: file → files that import it. + pub(super) imported_by: FxHashMap>, + /// Parse mode used for each file's current import entries. + pub(super) parse_modes: FxHashMap, +} + +impl ImportGraph { + /// Create a new empty import graph. 
+ #[must_use] + pub fn new(paths: PathStore) -> Self { + let resolver = paths.resolver(); + Self { + paths, + resolver, + imports: FxHashMap::default(), + resolved_imports: FxHashMap::default(), + resolved_code_imports: FxHashMap::default(), + imported_by: FxHashMap::default(), + parse_modes: FxHashMap::default(), + } + } + + /// Get or create the interned file id for `path`. + #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.paths.intern(path) + } + + /// Resolve a file path to an interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.resolver.file(path) + } + + /// Borrow an interned file identifier's canonical path. + #[must_use] + pub fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) + } + + /// Update the import graph for a file. + /// + /// This parses the document to find all imports, resolves their paths, + /// and updates both the forward and reverse maps. + /// Update a file's imports in the graph with pre-parsed entries. + /// + /// This is the preferred method when you want to minimize lock hold time. + /// Parse the imports first using [`parse_document_imports`], then call this + /// method while holding the write lock. + pub fn update_file_with_entries(&mut self, file_id: FileId, entries: Vec) { + self.update_file_with_entries_mode(file_id, entries, ImportParseMode::Precise); + } + + /// Update a file's imports in the graph with explicit parse mode. 
+ pub fn update_file_with_entries_mode( + &mut self, + file_id: FileId, + mut entries: Vec, + parse_mode: ImportParseMode, + ) { + self.resolve_entry_files(&mut entries); + debug_assert!( + entries + .iter() + .all(|entry| { entry.resolved_path.is_none() || entry.resolved_file.is_some() }), + "import graph entries with resolved_path must also include resolved_file" + ); + + // Remove old entries for this file + self.remove_file(file_id); + + // Update imported_by reverse index + for entry in &entries { + if let Some(resolved_id) = entry.resolved_file { + self.imported_by + .entry(resolved_id) + .or_default() + .insert(file_id); + } + } + + let (resolved_imports, resolved_code_imports) = Self::build_resolved_import_maps(&entries); + if resolved_imports.is_empty() { + self.resolved_imports.remove(&file_id); + } else { + self.resolved_imports + .insert(file_id, Arc::new(resolved_imports)); + } + if resolved_code_imports.is_empty() { + self.resolved_code_imports.remove(&file_id); + } else { + self.resolved_code_imports + .insert(file_id, Arc::new(resolved_code_imports)); + } + + // Store the import entries + self.imports.insert(file_id, entries); + self.parse_modes.insert(file_id, parse_mode); + } + + /// Update a file's imports in the graph. + /// + /// This parses the document and updates the import graph atomically. + /// For better performance when parsing is slow, use [`parse_document_imports`] + /// followed by [`update_file_with_entries`] to parse outside the lock. + pub fn update_file(&mut self, file_id: FileId, doc: &Document, resolve_import: F) + where + F: Fn(&str) -> Option, + { + let mut entries = parse_document_imports(doc, &resolve_import); + self.resolve_entry_files(&mut entries); + self.update_file_with_entries(file_id, entries); + } + + /// Fill `resolved_file` for entries that currently only carry + /// a canonical `resolved_path`. 
+ pub fn resolve_entry_files(&self, entries: &mut [ImportEntry]) { + for entry in entries { + if entry.resolved_file.is_some() { + continue; + } + let Some(path) = entry.resolved_path.as_ref() else { + continue; + }; + entry.resolved_file = Some( + self.resolver + .file(path) + .unwrap_or_else(|| self.paths.intern(path)), + ); + } + } + + /// Remove a file from the import graph. + /// + /// This removes the file's import entries and updates the reverse index. + pub fn remove_file(&mut self, file_id: FileId) { + // Remove from imported_by reverse index + if let Some(old_entries) = self.imports.get(&file_id) { + for entry in old_entries { + let Some(resolved_id) = entry.resolved_file else { + continue; + }; + let should_remove_entry = + self.imported_by + .get_mut(&resolved_id) + .is_some_and(|importers| { + importers.remove(&file_id); + importers.is_empty() + }); + if should_remove_entry { + self.imported_by.remove(&resolved_id); + } + } + } + + // Remove the import entries + self.imports.remove(&file_id); + self.resolved_imports.remove(&file_id); + self.resolved_code_imports.remove(&file_id); + self.parse_modes.remove(&file_id); + } + + /// Get the parse mode for a file's stored import entries. + #[must_use] + pub fn parse_mode(&self, file: FileId) -> Option { + self.parse_modes.get(&file).copied() + } + + /// Returns true when a file currently has precise import entries. + #[must_use] + pub fn is_precise(&self, file: FileId) -> bool { + matches!(self.parse_mode(file), Some(ImportParseMode::Precise)) + } + + pub(super) fn direct_importers_by_id(&self, file_id: FileId) -> Vec { + self.imported_by + .get(&file_id) + .map(|s| s.iter().copied().collect()) + .unwrap_or_default() + } + + #[must_use] + pub(super) fn resolved_entry_id(entry: &ImportEntry) -> Option { + entry.resolved_file + } + + /// Get the files that directly import a given file. 
+ #[must_use] + pub fn direct_importers(&self, file: FileId) -> Vec { + let mut importers = self.direct_importers_by_id(file); + importers.sort_unstable(); + importers + } + + /// Get all files that transitively import a given file. + /// + /// This performs a breadth-first search through the import graph + /// to find all files that depend on the given file, directly or indirectly. + #[must_use] + pub fn transitive_importers(&self, file: FileId) -> Vec { + let mut result = FxHashSet::default(); + let mut queue = VecDeque::from([file]); + + while let Some(current) = queue.pop_front() { + for importer in self.direct_importers_by_id(current) { + if result.insert(importer) { + queue.push_back(importer); + } + } + } + + let mut importers: Vec<_> = result.into_iter().collect(); + importers.sort_unstable(); + importers + } + + /// Get the import entries for a file. + pub fn imports(&self, file: FileId) -> &[ImportEntry] { + self.imports.get(&file).map_or(&[], Vec::as_slice) + } + + /// Get cached resolved imports (`import`, `importstr`, `importbin`) for a file. + pub fn resolved_import_map(&self, file: FileId) -> Option<&FxHashMap> { + self.resolved_imports.get(&file).map(Arc::as_ref) + } + + /// Get shared cached resolved imports (`import`, `importstr`, `importbin`) for a file. + pub fn resolved_import_map_arc(&self, file: FileId) -> Option>> { + self.resolved_imports.get(&file).cloned() + } + + /// Get cached resolved code imports (`import`) for a file. + pub fn resolved_code_import_map(&self, file: FileId) -> Option<&FxHashMap> { + self.resolved_code_imports.get(&file).map(Arc::as_ref) + } + + /// Get shared cached resolved code imports (`import`) for a file. + pub fn resolved_code_import_map_arc( + &self, + file: FileId, + ) -> Option>> { + self.resolved_code_imports.get(&file).cloned() + } + + /// Resolve one raw import path from a file using the cached import map. 
+ pub fn resolved_import(&self, file: FileId, import_path: &str) -> Option { + self.resolved_import_map(file)?.get(import_path).copied() + } + + /// Find imports in a file that point to a specific target file. + #[must_use] + pub fn imports_of_target(&self, file_id: FileId, target_id: FileId) -> Vec<&ImportEntry> { + self.imports + .get(&file_id) + .map(|entries| { + entries + .iter() + .filter(|entry| Self::resolved_entry_id(entry) == Some(target_id)) + .collect() + }) + .unwrap_or_default() + } + + /// Get the number of files tracked in the graph. + #[must_use] + pub fn file_count(&self) -> usize { + self.imports.len() + } + + /// Get all files tracked in the graph. + pub fn all_files(&self) -> impl Iterator + '_ { + self.imports.keys().copied() + } + + /// Get files that currently contain unresolved imports. + #[must_use] + pub fn files_with_unresolved_imports(&self) -> Vec { + let mut files = self + .imports + .iter() + .filter_map(|(&file, entries)| { + entries + .iter() + .any(|entry| entry.resolved_file.is_none()) + .then_some(file) + }) + .collect::>(); + files.sort_unstable(); + files + } + + fn build_resolved_import_maps( + entries: &[ImportEntry], + ) -> (FxHashMap, FxHashMap) { + let mut resolved_imports = FxHashMap::default(); + let mut resolved_code_imports = FxHashMap::default(); + + for entry in entries { + let Some(resolved_file) = entry.resolved_file else { + continue; + }; + resolved_imports + .entry(entry.import_path.clone()) + .or_insert(resolved_file); + if entry.kind == ImportKind::Code { + resolved_code_imports + .entry(entry.import_path.clone()) + .or_insert(resolved_file); + } + } + + (resolved_imports, resolved_code_imports) + } +} + +#[cfg(test)] +#[path = "tests.rs"] +mod tests; diff --git a/crates/jrsonnet-lsp-import/src/graph/parse.rs b/crates/jrsonnet-lsp-import/src/graph/parse.rs new file mode 100644 index 00000000..6c902925 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/parse.rs @@ -0,0 +1,295 @@ +use 
jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, ExprImport, ImportKindKind, StmtLocal}, + AstNode, AstToken, SyntaxKind, +}; +use rustc_hash::FxHashSet; + +use super::{ImportEntry, ImportKind, ImportOccurrence}; +use crate::parse::extract_import_path; + +/// Parse import statements from a document. +/// +/// This extracts all import entries from the document without modifying +/// the import graph. Use this when you want to parse outside a lock, +/// then pass the results to [`ImportGraph::update_file_with_entries`]. +pub fn parse_document_imports(doc: &Document, resolve_import: &F) -> Vec +where + F: Fn(&str) -> Option, +{ + parse_document_import_occurrences(doc, resolve_import) + .into_iter() + .map(|occurrence| occurrence.entry) + .collect() +} + +/// Parse import statements using a token-only fast path. +/// +/// This is less precise than [`parse_document_imports`] for some malformed +/// constructs, but significantly cheaper for broad background indexing. +pub fn parse_document_imports_approximate(doc: &Document, resolve_import: &F) -> Vec +where + F: Fn(&str) -> Option, +{ + parse_document_import_occurrences_approximate(doc, resolve_import) + .into_iter() + .map(|occurrence| occurrence.entry) + .collect() +} + +/// Parse import occurrences from a document with source ranges. +/// +/// This is useful for diagnostics where callers need to point at the exact +/// import token in source when a path cannot be resolved. 
+pub fn parse_document_import_occurrences( + doc: &Document, + resolve_import: &F, +) -> Vec +where + F: Fn(&str) -> Option, +{ + let mut occurrences = Vec::new(); + let mut seen_expr_import_ranges = FxHashSet::default(); + let mut seen_string_ranges = FxHashSet::default(); + let ast = doc.ast(); + + // Single pass over AST nodes: + // - capture local-bound imports with binding names + // - capture bare import expressions + // - dedupe imports that were already captured from local statements + for node in ast.syntax().descendants() { + match node.kind() { + SyntaxKind::STMT_LOCAL => { + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + if let Some((occurrence, import_range)) = + parse_bind_import_with_range(&bind, resolve_import) + { + seen_expr_import_ranges.insert(import_range); + seen_string_ranges.insert(occurrence.import_range); + occurrences.push(occurrence); + } + } + } + } + SyntaxKind::EXPR_IMPORT => { + let range = node.text_range(); + // Skip if we already captured this import via a local statement. + if !seen_expr_import_ranges.insert(range) { + continue; + } + if let Some(import) = ExprImport::cast(node) { + if let Some(occurrence) = parse_import_occurrence(&import, None, resolve_import) + { + seen_string_ranges.insert(occurrence.import_range); + occurrences.push(occurrence); + } + } + } + _ => {} + } + } + + // Fallback pass: recover imports from token stream for syntax-broken files. + occurrences.extend(parse_token_import_occurrences( + doc, + resolve_import, + &mut seen_string_ranges, + )); + + occurrences +} + +/// Parse import occurrences with a token-only approximation. 
+pub fn parse_document_import_occurrences_approximate( + doc: &Document, + resolve_import: &F, +) -> Vec +where + F: Fn(&str) -> Option, +{ + let mut seen_string_ranges = FxHashSet::default(); + parse_token_import_occurrences(doc, resolve_import, &mut seen_string_ranges) +} + +/// Parse a bind to extract import information, returning the import's text range. +fn parse_bind_import_with_range( + bind: &Bind, + resolve_import: &F, +) -> Option<(ImportOccurrence, rowan::TextRange)> +where + F: Fn(&str) -> Option, +{ + let Bind::BindDestruct(bd) = bind else { + return None; + }; + + let destruct = bd.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + + let bind_name = full.name()?.ident_lit()?.text().to_string(); + + // Check if the expression is an import. + let expr = bd.value()?; + for node in expr.syntax().descendants() { + if node.kind() == SyntaxKind::EXPR_IMPORT { + let range = node.text_range(); + if let Some(import) = ExprImport::cast(node) { + if let Some(occurrence) = + parse_import_occurrence(&import, Some(bind_name.clone()), resolve_import) + { + return Some((occurrence, range)); + } + } + } + } + + None +} + +fn parse_import_occurrence( + import: &ExprImport, + binding_name: Option, + resolve_import: &F, +) -> Option +where + F: Fn(&str) -> Option, +{ + let kind = import_kind_from_expr(import)?; + let path = extract_import_path(import)?; + let resolved = resolve_import(&path); + let import_range = import.text()?.syntax().text_range(); + + Some(ImportOccurrence { + entry: ImportEntry { + kind, + binding_name, + import_path: path, + resolved_file: None, + resolved_path: resolved, + }, + import_range, + }) +} + +fn parse_token_import_occurrences( + doc: &Document, + resolve_import: &F, + seen_string_ranges: &mut FxHashSet, +) -> Vec +where + F: Fn(&str) -> Option, +{ + let mut occurrences = Vec::new(); + let mut pending_import_kind = None; + + for token in doc + .ast() + .syntax() + .descendants_with_tokens() + 
.filter_map(rowan::NodeOrToken::into_token) + { + let kind = token.kind(); + if is_import_keyword(kind) { + pending_import_kind = Some(kind); + continue; + } + + if matches!( + kind, + SyntaxKind::WHITESPACE + | SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) { + continue; + } + + let Some(keyword_kind) = pending_import_kind.take() else { + continue; + }; + if !is_import_string_token(kind) { + continue; + } + + let import_range = token.text_range(); + if !seen_string_ranges.insert(import_range) { + continue; + } + + let import_path = strip_string_quotes(token.text()); + if import_path.is_empty() { + continue; + } + + let Some(import_kind) = import_kind_from_keyword_token(keyword_kind) else { + continue; + }; + + occurrences.push(ImportOccurrence { + entry: ImportEntry { + kind: import_kind, + binding_name: binding_name_from_import_token(&token), + resolved_path: resolve_import(&import_path), + import_path, + resolved_file: None, + }, + import_range, + }); + } + + occurrences +} + +const fn is_import_keyword(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::IMPORT_KW | SyntaxKind::IMPORTSTR_KW | SyntaxKind::IMPORTBIN_KW + ) +} + +const fn is_import_string_token(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::STRING_DOUBLE + | SyntaxKind::STRING_SINGLE + | SyntaxKind::STRING_DOUBLE_VERBATIM + | SyntaxKind::STRING_SINGLE_VERBATIM + | SyntaxKind::ERROR_STRING_DOUBLE_UNTERMINATED + | SyntaxKind::ERROR_STRING_SINGLE_UNTERMINATED + | SyntaxKind::ERROR_STRING_DOUBLE_VERBATIM_UNTERMINATED + | SyntaxKind::ERROR_STRING_SINGLE_VERBATIM_UNTERMINATED + ) +} + +fn binding_name_from_import_token(token: &jrsonnet_rowan_parser::SyntaxToken) -> Option { + let bind = token.parent()?.ancestors().find_map(Bind::cast)?; + let Bind::BindDestruct(bind_destruct) = bind else { + return None; + }; + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?; + let 
Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.ident_lit()?.text().to_string()) +} + +fn import_kind_from_expr(import: &ExprImport) -> Option { + let token_kind = import.import_kind()?.kind(); + Some(match token_kind { + ImportKindKind::ImportKw => ImportKind::Code, + ImportKindKind::ImportstrKw => ImportKind::String, + ImportKindKind::ImportbinKw => ImportKind::Binary, + }) +} + +const fn import_kind_from_keyword_token(kind: SyntaxKind) -> Option { + match kind { + SyntaxKind::IMPORT_KW => Some(ImportKind::Code), + SyntaxKind::IMPORTSTR_KW => Some(ImportKind::String), + SyntaxKind::IMPORTBIN_KW => Some(ImportKind::Binary), + _ => None, + } +} diff --git a/crates/jrsonnet-lsp-import/src/graph/tests.rs b/crates/jrsonnet-lsp-import/src/graph/tests.rs new file mode 100644 index 00000000..5fe84ed8 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/tests.rs @@ -0,0 +1,744 @@ +use std::path::PathBuf; + +use jrsonnet_lsp_document::DocVersion; + +use super::*; + +fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(PathBuf::from(format!("/test/{name}"))) +} + +/// A simple resolver that just appends the import path to /test/ +fn simple_resolver(import: &str) -> Option { + if import.is_empty() { + None + } else { + Some(test_path(import)) + } +} + +fn graph_paths(graph: &ImportGraph, files: Vec) -> Vec { + files + .into_iter() + .filter_map(|file| graph.path(file).map(|path| path.as_ref().clone())) + .collect() +} + +#[test] +fn test_parse_local_import() { + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); + + assert_eq!( + entries, + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: None, + resolved_path: Some(test_path("lib.jsonnet")), + }] + ); +} + +#[test] +fn 
test_parse_import_occurrences_include_string_token_range() { + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let occurrences = parse_document_import_occurrences(&doc, &simple_resolver); + let start = u32::try_from( + code.find("\"lib.jsonnet\"") + .expect("import string should exist"), + ) + .expect("import start offset should fit into u32"); + let end = start + + u32::try_from("\"lib.jsonnet\"".len()) + .expect("import literal length should fit into u32"); + + assert_eq!( + occurrences, + vec![ImportOccurrence { + entry: ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: None, + resolved_path: Some(test_path("lib.jsonnet")), + }, + import_range: rowan::TextRange::new(start.into(), end.into()), + }] + ); +} + +#[test] +fn test_parse_import_occurrences_fallback_unterminated_string() { + let code = r#"local lib = import "lib.jsonnet"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let occurrences = parse_document_import_occurrences(&doc, &simple_resolver); + let start = u32::try_from( + code.find("\"lib.jsonnet") + .expect("unterminated import string should exist"), + ) + .expect("unterminated import start should fit into u32"); + let end = u32::try_from(code.len()).expect("source length should fit into u32"); + + assert_eq!( + occurrences, + vec![ImportOccurrence { + entry: ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: None, + resolved_path: Some(test_path("lib.jsonnet")), + }, + import_range: rowan::TextRange::new(start.into(), end.into()), + }] + ); +} + +#[test] +fn test_parse_local_import_single_quote() { + let code = "local lib = import 'lib.jsonnet'; lib"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); 
+ + assert_eq!( + entries, + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: None, + resolved_path: Some(test_path("lib.jsonnet")), + }] + ); +} + +#[test] +fn test_parse_multiple_imports() { + let code = r#" +local lib1 = import "lib1.jsonnet"; +local lib2 = import "lib2.jsonnet"; +lib1 + lib2 +"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); + + assert_eq!( + entries, + vec![ + ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib1".to_string()), + import_path: "lib1.jsonnet".to_string(), + resolved_file: None, + resolved_path: Some(test_path("lib1.jsonnet")), + }, + ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib2".to_string()), + import_path: "lib2.jsonnet".to_string(), + resolved_file: None, + resolved_path: Some(test_path("lib2.jsonnet")), + }, + ] + ); +} + +#[test] +fn test_import_graph_update() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + graph.update_file(graph.intern(&main), &doc, simple_resolver); + + // Check that main imports lib + let imports = graph.imports(graph.intern(&main)); + let lib_file = graph.intern(&lib); + assert_eq!( + imports, + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: Some(lib_file), + resolved_path: Some(lib.clone()), + }] + ); + + // Check the reverse index + let importers = graph_paths(&graph, graph.direct_importers(graph.intern(&lib))); + assert_eq!(importers, vec![main]); +} + +#[test] +fn test_import_graph_lookups_with_equivalent_paths() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main 
= test_path("main.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &doc, simple_resolver); + + let main_lookup = test_path("main.jsonnet"); + let lib_lookup = test_path("lib.jsonnet"); + + assert_eq!( + graph_paths(&graph, graph.direct_importers(graph.intern(&lib_lookup)),), + vec![main_lookup.clone()] + ); + assert_eq!( + graph.imports_of_target(graph.intern(&main_lookup), graph.intern(&lib_lookup),), + vec![&ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: Some(graph.intern(&lib_lookup)), + resolved_path: Some(lib_lookup), + }] + ); + assert_eq!( + graph.parse_mode(graph.intern(&main_lookup)), + Some(ImportParseMode::Precise) + ); +} + +#[test] +fn test_import_graph_remove() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + graph.update_file(graph.intern(&main), &doc, simple_resolver); + + // Remove main + graph.remove_file(graph.intern(&main)); + + // Check that main no longer has imports + assert!(graph.imports(graph.intern(&main)).is_empty()); + + // Check the reverse index is updated + let lib = test_path("lib.jsonnet"); + assert!(graph.direct_importers(graph.intern(&lib)).is_empty()); +} + +#[test] +fn test_transitive_importers() { + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main.jsonnet -> utils.jsonnet -> lib.jsonnet + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), 
&main_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + // Check transitive importers of lib + let importers = graph_paths(&graph, graph.transitive_importers(graph.intern(&lib))); + assert_eq!(importers, vec![main, utils]); +} + +#[test] +fn test_imports_of_target() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + + let code = r#" +local lib = import "lib.jsonnet"; +local other = import "other.jsonnet"; +lib + other +"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &doc, simple_resolver); + + // Get imports of lib.jsonnet from main + let imports = graph.imports_of_target(graph.intern(&main), graph.intern(&lib)); + assert_eq!( + imports, + vec![&ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: Some(graph.intern(&lib)), + resolved_path: Some(lib), + }] + ); +} + +#[test] +fn test_resolved_import_maps_cached_and_updated() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + let text = test_path("data.txt"); + let code = r#" +local lib = import "lib.jsonnet"; +local text = importstr "data.txt"; +{ lib: lib, text: text } +"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let main_file = graph.intern(&main); + let lib_file = graph.intern(&lib); + let text_file = graph.intern(&text); + graph.update_file(main_file, &doc, simple_resolver); + + assert_eq!( + graph.resolved_import(main_file, "lib.jsonnet"), + Some(lib_file) + ); + assert_eq!( + graph.resolved_import(main_file, "data.txt"), + Some(text_file) + ); + + let 
code_imports = graph + .resolved_code_import_map(main_file) + .expect("code import cache should exist"); + assert_eq!(code_imports.get("lib.jsonnet"), Some(&lib_file)); + assert!(!code_imports.contains_key("data.txt")); + + let empty_doc = Document::new("{}".to_string(), DocVersion::new(2)); + graph.update_file(main_file, &empty_doc, simple_resolver); + assert!(graph.resolved_import_map(main_file).is_none()); + assert!(graph.resolved_code_import_map(main_file).is_none()); +} + +#[test] +fn test_files_with_unresolved_imports() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let unresolved = test_path("broken.jsonnet"); + + let unresolved_doc = Document::new( + r#"local x = import "missing.jsonnet"; x"#.to_string(), + DocVersion::new(1), + ); + graph.update_file(graph.intern(&unresolved), &unresolved_doc, |import| { + if import == "missing.jsonnet" { + None + } else { + simple_resolver(import) + } + }); + + let resolved_doc = Document::new( + r#"local lib = import "main.jsonnet"; lib"#.to_string(), + DocVersion::new(1), + ); + graph.update_file(graph.intern(&main), &resolved_doc, simple_resolver); + + let unresolved_files = graph_paths(&graph, graph.files_with_unresolved_imports()); + assert_eq!(unresolved_files, vec![unresolved]); +} + +#[test] +fn test_update_file_with_entries_mode_approximate() { + let mut graph = ImportGraph::new(PathStore::new()); + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + let main_file = graph.intern(&main); + + graph.update_file_with_entries_mode( + main_file, + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: None, + import_path: "lib.jsonnet".to_string(), + resolved_file: Some(graph.intern(&lib)), + resolved_path: Some(lib), + }], + ImportParseMode::Approximate, + ); + + assert_eq!( + graph.parse_mode(main_file), + Some(ImportParseMode::Approximate) + ); + assert!(!graph.is_precise(main_file)); +} + +#[test] +fn 
test_topological_order_simple() { + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> utils -> lib (chain dependency) + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_code = "{}"; + let lib_doc = Document::new(lib_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let levels = graph + .topological_order() + .map(|levels| { + levels + .into_iter() + .map(|level| graph_paths(&graph, level)) + .collect::>() + }) + .expect("Should not have cycles"); + + // lib should be in first level (no deps) + // utils should be in second level (depends on lib) + // main should be in third level (depends on utils) + assert_eq!(levels, vec![vec![lib], vec![utils], vec![main]]); +} + +#[test] +fn test_topological_order_parallel_files() { + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main imports both utils1 and utils2 (independent) + let main = test_path("main.jsonnet"); + let utils1 = test_path("utils1.jsonnet"); + let utils2 = test_path("utils2.jsonnet"); + + // utils1 has no imports + let utils1_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils1), &utils1_doc, simple_resolver); + + // utils2 has no imports + let utils2_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils2), &utils2_doc, simple_resolver); + + // 
main imports both + let main_code = r#" +local u1 = import "utils1.jsonnet"; +local u2 = import "utils2.jsonnet"; +u1 + u2 +"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let levels = graph + .topological_order() + .map(|levels| { + levels + .into_iter() + .map(|level| graph_paths(&graph, level)) + .collect::>() + }) + .expect("Should not have cycles"); + + // utils1 and utils2 should be in first level (independent, can be parallel, sorted) + // main should be in second level + assert_eq!(levels, vec![vec![utils1, utils2], vec![main]]); +} + +#[test] +fn test_process_in_parallel() { + use std::sync::atomic::{AtomicUsize, Ordering}; + + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> lib (chain) + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // main imports lib + let main_code = r#"local lib = import "lib.jsonnet"; lib"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let counter = AtomicUsize::new(0); + graph + .process_in_parallel(|_file| { + counter.fetch_add(1, Ordering::SeqCst); + }) + .expect("should process files in parallel"); + assert_eq!(counter.load(Ordering::SeqCst), 2); +} + +#[test] +fn test_process_in_parallel_order() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> lib (chain) + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // main imports lib + let main_code = r#"local 
lib = import "lib.jsonnet"; lib"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let processed_order = Arc::new(Mutex::new(Vec::new())); + let order_clone = Arc::clone(&processed_order); + graph.process_in_parallel(move |file| { + order_clone + .lock() + .expect("processed_order mutex should not be poisoned") + .push(file); + }); + + let order = graph_paths( + &graph, + processed_order + .lock() + .expect("processed_order mutex should not be poisoned") + .clone(), + ); + // lib should be processed before main (lib has no deps, main depends on lib) + assert_eq!(order, vec![lib, main]); +} + +#[test] +fn test_process_with_dependencies() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> utils -> lib + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + // Process main and its dependencies + graph.process_with_dependencies( + graph.intern(&main), + |_| true, + move |file| { + processed_clone + .lock() + .expect("processed mutex should not be poisoned") + .push(file); + }, + ); + + let order = graph_paths( + 
&graph, + processed + .lock() + .expect("processed mutex should not be poisoned") + .clone(), + ); + + // lib should be processed before utils, utils before main + assert_eq!(order, vec![lib, utils, main]); +} + +#[test] +fn test_process_with_dependencies_filtered_by_kind() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let data = test_path("data.jsonnet"); + let script = test_path("script.k"); + + graph.update_file_with_entries( + graph.intern(&main), + vec![ + ImportEntry { + kind: ImportKind::Code, + binding_name: Some("data".to_string()), + import_path: "data.jsonnet".to_string(), + resolved_file: Some(graph.intern(&data)), + resolved_path: Some(data.clone()), + }, + ImportEntry { + kind: ImportKind::String, + binding_name: Some("payload".to_string()), + import_path: "script.k".to_string(), + resolved_file: Some(graph.intern(&script)), + resolved_path: Some(script), + }, + ], + ); + + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + graph.process_with_dependencies( + graph.intern(&main), + |entry| entry.kind == ImportKind::Code, + move |file| { + processed_clone + .lock() + .expect("processed mutex should not be poisoned") + .push(file); + }, + ); + + let order = graph_paths( + &graph, + processed + .lock() + .expect("processed mutex should not be poisoned") + .clone(), + ); + assert_eq!(order, vec![data, main]); +} + +#[test] +fn test_process_importers_with_work_queue() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> utils -> lib + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local 
lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + // Process lib and its importers (cascade) + graph.process_importers_with_work_queue(graph.intern(&lib), move |file| { + processed_clone + .lock() + .expect("processed mutex should not be poisoned") + .push(file); + }); + + let order = graph_paths( + &graph, + processed + .lock() + .expect("processed mutex should not be poisoned") + .clone(), + ); + + // lib first, then utils (imports lib), then main (imports utils) + assert_eq!(order, vec![lib, utils, main]); +} + +#[test] +fn test_importer_levels_root_first() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + let utils_doc = Document::new( + r#"local lib = import "lib.jsonnet"; lib"#.to_string(), + DocVersion::new(1), + ); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + let main_doc = Document::new( + r#"local utils = import "utils.jsonnet"; utils"#.to_string(), + DocVersion::new(1), + ); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let levels = graph.importer_levels(graph.intern(&lib)); + let got = levels + .into_iter() + .map(|level| graph_paths(&graph, level)) + .collect::>(); + assert_eq!(got, vec![vec![lib], vec![utils], vec![main]]); +} + +#[test] +fn 
test_process_with_dependencies_unknown_root_is_noop() { + use std::sync::{Arc, Mutex}; + + let graph = ImportGraph::new(PathStore::new()); + let missing = test_path("missing.jsonnet"); + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + if let Some(root) = graph.file(&missing) { + graph.process_with_dependencies( + root, + |_| true, + move |file| { + processed_clone + .lock() + .expect("processed mutex should not be poisoned") + .push(file); + }, + ); + } + + assert_eq!( + *processed + .lock() + .expect("processed mutex should not be poisoned"), + Vec::::new() + ); +} diff --git a/crates/jrsonnet-lsp-import/src/graph/traversal.rs b/crates/jrsonnet-lsp-import/src/graph/traversal.rs new file mode 100644 index 00000000..7d4eb4c2 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/traversal.rs @@ -0,0 +1,244 @@ +use jrsonnet_lsp_document::FileId; +use rustc_hash::FxHashMap; + +use super::{ImportEntry, ImportGraph}; +use crate::work_queue::{WorkQueue, WorkQueueExt}; + +impl ImportGraph { + /// Build dependency levels for a root file. + /// + /// Returns levels in dependency order (leaves first), with each inner level + /// safe to process in parallel. + pub fn dependency_levels

(&self, root: FileId, include_dependency: P) -> Vec> + where + P: Fn(&ImportEntry) -> bool + Sync, + { + let mut work = WorkQueue::new(); + work.push(root); + + work.run(|path_id, deps| { + // Get dependencies from import graph + if let Some(entries) = self.imports.get(path_id) { + for entry in entries { + if !include_dependency(entry) { + continue; + } + if let Some(resolved_id) = Self::resolved_entry_id(entry) { + deps.push(resolved_id); + } + } + } + }) + } + + /// Compute a topological ordering of files based on import dependencies. + /// + /// Returns files in an order where each file comes after all files it imports. + /// Files at the same "level" (no dependencies between them) can be processed + /// in parallel. + /// + /// Returns `None` if there's a cycle in the import graph. + #[must_use] + pub fn topological_order(&self) -> Option>> { + if self.imports.is_empty() { + return Some(Vec::new()); + } + + let mut in_degree = self.topological_in_degree(); + let mut current_level = Self::zero_in_degree(&in_degree); + if current_level.is_empty() { + return None; + } + + let mut levels: Vec> = Vec::new(); + let mut processed_count = 0usize; + + while !current_level.is_empty() { + // Sort for deterministic ordering + current_level.sort_unstable(); + + processed_count += current_level.len(); + let next_level = self.next_topological_level(¤t_level, &mut in_degree); + levels.push(current_level); + current_level = next_level; + } + + // Check if all files were processed (no cycles) + if processed_count != self.imports.len() { + return None; + } + + Some(levels) + } + + fn topological_in_degree(&self) -> FxHashMap { + let mut in_degree: FxHashMap = + self.imports.keys().map(|&file| (file, 0)).collect(); + + for (dependency, importers) in &self.imported_by { + if !in_degree.contains_key(dependency) { + continue; + } + + for importer in importers { + if let Some(degree) = in_degree.get_mut(importer) { + *degree += 1; + } + } + } + + in_degree + } + + fn 
zero_in_degree(in_degree: &FxHashMap) -> Vec { + in_degree + .iter() + .filter_map(|(&file, °ree)| (degree == 0).then_some(file)) + .collect() + } + + fn next_topological_level( + &self, + current_level: &[FileId], + in_degree: &mut FxHashMap, + ) -> Vec { + current_level + .iter() + .flat_map(|file| { + self.imported_by + .get(file) + .into_iter() + .flat_map(|importers| importers.iter().copied()) + }) + .filter_map(|importer| { + let degree = in_degree.get_mut(&importer)?; + if *degree == 0 { + return None; + } + *degree -= 1; + (*degree == 0).then_some(importer) + }) + .collect() + } + + /// Process files in topological order with parallel processing within each level. + /// + /// This computes a topological ordering of files based on import dependencies, + /// then processes each level in parallel. Files in the same level have no + /// dependencies on each other and can safely be processed concurrently. + /// + /// # Arguments + /// * `f` - Function to call for each file path. Must be `Sync` for parallel execution. + /// + /// # Returns + /// * `Some(())` if processing completed successfully + /// * `None` if there's a cycle in the import graph + /// + /// # Example + /// ```ignore + /// graph.process_in_parallel(|file| { + /// analyze_file(file); + /// }); + /// ``` + pub fn process_in_parallel(&self, f: F) -> Option<()> + where + F: Fn(FileId) + Sync, + { + use rayon::prelude::*; + + let levels = self.topological_order()?; + + // Process each level sequentially, but files within each level in parallel + for level in levels { + level.par_iter().copied().for_each(&f); + } + + Some(()) + } + + /// Process files in reverse topological order with parallel processing within each level. + /// + /// Similar to `process_in_parallel`, but processes files in reverse order - + /// files that are imported by others are processed last. This is useful when + /// you need to process dependents before their dependencies. 
+ pub fn process_in_parallel_reverse(&self, f: F) -> Option<()> + where + F: Fn(FileId) + Sync, + { + use rayon::prelude::*; + + let levels = self.topological_order()?; + + // Process levels in reverse order + for level in levels.into_iter().rev() { + level.par_iter().copied().for_each(&f); + } + + Some(()) + } + + /// Process a file and its transitive dependencies using a work queue. + /// + /// This dynamically discovers dependencies during processing and ensures + /// dependencies are processed before dependents. Uses per-level parallelism. + /// + /// # Arguments + /// * `root` - The root file to process + /// * `f` - Function to call for each file path + /// + /// # Example + /// ```ignore + /// graph.process_with_dependencies(file, |_| true, |dep| { + /// analyze_file(dep); + /// }); + /// ``` + pub fn process_with_dependencies(&self, root: FileId, include_dependency: P, f: F) + where + F: Fn(FileId) + Sync, + P: Fn(&ImportEntry) -> bool + Sync, + { + let levels = self.dependency_levels(root, include_dependency); + + // Process levels in dependency order (leaves first) + levels.process_parallel(|path_id| f(*path_id)); + } + + /// Process a file and its transitive importers using a work queue. + /// + /// This processes files in reverse dependency order - the root file first, + /// then files that import it, and so on. Uses per-level parallelism. + /// + /// Useful for invalidation cascading: when a file changes, process it + /// and all files that depend on it. + #[must_use] + pub fn importer_levels(&self, root: FileId) -> Vec> { + let mut work = WorkQueue::new(); + work.push(root); + + let mut levels = work.run(|path_id, deps| { + // Get files that import this file. + for importer in self.direct_importers_by_id(*path_id) { + deps.push(importer); + } + }); + + // Work queue returns leaves first; importers should be processed root-first. + levels.reverse(); + levels + } + + /// Process a file and its transitive importers using a work queue. 
+ /// + /// This processes files in reverse dependency order - the root file first, + /// then files that import it, and so on. Uses per-level parallelism. + /// + /// Useful for invalidation cascading: when a file changes, process it + /// and all files that depend on it. + pub fn process_importers_with_work_queue(&self, root: FileId, f: F) + where + F: Fn(FileId) + Sync, + { + self.importer_levels(root) + .process_parallel(|path_id| f(*path_id)); + } +} diff --git a/crates/jrsonnet-lsp-import/src/lib.rs b/crates/jrsonnet-lsp-import/src/lib.rs new file mode 100644 index 00000000..e7159c3d --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/lib.rs @@ -0,0 +1,23 @@ +//! Import graph and work queue for jrsonnet LSP. +//! +//! This crate provides: +//! - Import path extraction from AST nodes +//! - Import graph tracking dependencies between files +//! - Topological work queue for parallel processing + +pub mod graph; +pub mod parse; +pub mod resolve; +pub mod work_queue; + +pub use graph::{ + parse_document_import_occurrences, parse_document_import_occurrences_approximate, + parse_document_imports, parse_document_imports_approximate, ImportEntry, ImportGraph, + ImportKind, ImportOccurrence, ImportParseMode, +}; +pub use parse::{ + check_import_from_token, check_import_path, extract_import_path, find_import_in_node, + get_import_path_from_node, +}; +pub use resolve::ImportResolution; +pub use work_queue::{WorkQueue, WorkQueueExt}; diff --git a/crates/jrsonnet-lsp-import/src/parse.rs b/crates/jrsonnet-lsp-import/src/parse.rs new file mode 100644 index 00000000..870af0c0 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/parse.rs @@ -0,0 +1,195 @@ +//! Import path extraction from AST nodes. +//! +//! Utilities for extracting import paths from Jsonnet AST nodes. 
+ +use jrsonnet_lsp_document::strip_string_quotes; +use jrsonnet_rowan_parser::{ + nodes::ExprImport, AstNode, AstToken, SyntaxKind, SyntaxNode, SyntaxToken, +}; + +/// Extract the path string from an import expression. +/// +/// This handles all string literal formats: double-quoted, single-quoted, +/// and verbatim strings (with @ prefix). +/// +/// # Example +/// ```ignore +/// // For `import "foo.jsonnet"` returns Some("foo.jsonnet") +/// // For `import @'bar.jsonnet'` returns Some("bar.jsonnet") +/// let path = extract_import_path(&import_expr); +/// ``` +#[must_use] +pub fn extract_import_path(import: &ExprImport) -> Option { + let text_token = import.text()?; + let text = text_token.text(); + Some(strip_string_quotes(text)) +} + +/// Find an import expression within a syntax node's descendants. +/// +/// Returns the first `ExprImport` found, or `None` if no import exists. +#[must_use] +pub fn find_import_in_node(node: &SyntaxNode) -> Option { + for descendant in node.descendants() { + if descendant.kind() == SyntaxKind::EXPR_IMPORT { + return ExprImport::cast(descendant); + } + } + None +} + +/// Check if a token is inside an import expression and extract the import path. +/// +/// This is useful for handling "go to definition" from within an import string. +/// Returns `Some((import_expr, path))` if the token is inside an import, `None` otherwise. 
+#[must_use] +pub fn check_import_from_token(token: &SyntaxToken) -> Option<(ExprImport, String)> { + // Must be a string token + let kind = token.kind(); + if !matches!( + kind, + SyntaxKind::STRING_DOUBLE + | SyntaxKind::STRING_SINGLE + | SyntaxKind::STRING_DOUBLE_VERBATIM + | SyntaxKind::STRING_SINGLE_VERBATIM + ) { + return None; + } + + // Walk up to find if this is inside an ExprImport + let mut node = token.parent()?; + loop { + if let Some(import) = ExprImport::cast(node.clone()) { + let path = extract_import_path(&import)?; + return Some((import, path)); + } + node = node.parent()?; + } +} + +/// Get the import path from a syntax node if it contains an import. +/// +/// This searches the node's descendants for an import expression and returns its path. +#[must_use] +pub fn get_import_path_from_node(node: &SyntaxNode) -> Option { + let import = find_import_in_node(node)?; + extract_import_path(&import) +} + +/// Check if a token is inside an import expression and return the import path. +/// +/// This is a convenience wrapper around [`check_import_from_token`] that just +/// returns the path string. 
+#[must_use] +pub fn check_import_path(token: &SyntaxToken) -> Option { + check_import_from_token(token).map(|(_, path)| path) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_rowan_parser::AstNode; + + use super::*; + + #[test] + fn test_extract_import_path() { + let code = r#"import "lib.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let import = find_import_in_node(ast.syntax()).expect("should find import expression"); + + let path = extract_import_path(&import); + assert_eq!(path, Some("lib.jsonnet".to_string())); + } + + #[test] + fn test_extract_import_path_verbatim() { + let code = r#"import @"lib.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let import = find_import_in_node(ast.syntax()).expect("should find import expression"); + + let path = extract_import_path(&import); + assert_eq!(path, Some("lib.jsonnet".to_string())); + } + + #[test] + fn test_find_import_in_node() { + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let _import = + find_import_in_node(ast.syntax()).expect("should find import in local binding"); + } + + #[test] + fn test_get_import_path_from_node() { + let code = r#"import "utils.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let path = get_import_path_from_node(ast.syntax()); + assert_eq!(path, Some("utils.jsonnet".to_string())); + } + + #[test] + fn test_check_import_path() { + let code = r#"import "foo.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the string token - there should be exactly one STRING_DOUBLE token + let string_token = ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|t| t.kind() == 
SyntaxKind::STRING_DOUBLE) + .expect("should find STRING_DOUBLE token"); + + let path = check_import_path(&string_token); + assert_eq!(path, Some("foo.jsonnet".to_string())); + } + + #[test] + fn test_check_import_from_token() { + let code = r#"import "bar.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the string token - there should be exactly one STRING_DOUBLE token + let string_token = ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|t| t.kind() == SyntaxKind::STRING_DOUBLE) + .expect("should find STRING_DOUBLE token"); + + let (import, path) = + check_import_from_token(&string_token).expect("should find import from token"); + assert_eq!(path, "bar.jsonnet"); + let _text = import.text().expect("import should have text"); + } + + #[test] + fn test_check_import_path_not_import() { + // A string that is NOT inside an import + let code = r#"local x = "not an import";"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the string token - there should be exactly one STRING_DOUBLE token + let string_token = ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|t| t.kind() == SyntaxKind::STRING_DOUBLE) + .expect("should find STRING_DOUBLE token"); + + let path = check_import_path(&string_token); + assert_eq!(path, None); + } +} diff --git a/crates/jrsonnet-lsp-import/src/resolve/mod.rs b/crates/jrsonnet-lsp-import/src/resolve/mod.rs new file mode 100644 index 00000000..b7278bf1 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/resolve/mod.rs @@ -0,0 +1,32 @@ +//! Shared import path resolution utilities. +//! +//! This module centralizes path resolution rules so all LSP subsystems resolve +//! imports consistently. 
+ +mod parse_adapter; +mod path_resolution; + +use std::path::PathBuf; + +use jrsonnet_lsp_document::CanonicalPath; + +/// Import-resolution boundary for one importing file. +/// +/// This captures the importing file path and effective import roots once, +/// then exposes a single API that callers can use for raw path resolution and +/// import parsing with consistent behavior. +#[derive(Debug, Clone, Copy)] +pub struct ImportResolution<'a> { + pub(super) importer_file: &'a CanonicalPath, + pub(super) import_roots: &'a [PathBuf], +} + +impl<'a> ImportResolution<'a> { + #[must_use] + pub fn new(importer_file: &'a CanonicalPath, import_roots: &'a [PathBuf]) -> Self { + Self { + importer_file, + import_roots, + } + } +} diff --git a/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs b/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs new file mode 100644 index 00000000..ad57632b --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs @@ -0,0 +1,129 @@ +use jrsonnet_lsp_document::{CanonicalPath, Document}; + +use super::{path_resolution::resolve_import_path, ImportResolution}; +use crate::graph::{ + parse_document_import_occurrences, parse_document_import_occurrences_approximate, + parse_document_imports, parse_document_imports_approximate, ImportEntry, ImportOccurrence, +}; + +impl ImportResolution<'_> { + #[must_use] + pub fn resolve(self, import_path: &str) -> Option { + resolve_import_path(self.importer_file, import_path, self.import_roots) + } + + #[must_use] + pub fn parse_entries(self, doc: &Document) -> Vec { + parse_document_imports(doc, &|import_path| self.resolve(import_path)) + } + + #[must_use] + pub fn parse_entries_approximate(self, doc: &Document) -> Vec { + parse_document_imports_approximate(doc, &|import_path| self.resolve(import_path)) + } + + #[must_use] + pub fn parse_occurrences(self, doc: &Document) -> Vec { + parse_document_import_occurrences(doc, &|import_path| self.resolve(import_path)) + } + + #[must_use] + pub 
fn parse_occurrences_approximate(self, doc: &Document) -> Vec { + parse_document_import_occurrences_approximate(doc, &|import_path| self.resolve(import_path)) + } +} + +#[cfg(test)] +mod tests { + use std::fs; + + use jrsonnet_lsp_document::DocVersion; + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_import_resolution_parse_entries() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + fs::write(&importer, "local lib = import \"lib.jsonnet\"; lib") + .expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let resolved_lib = CanonicalPath::new( + local_lib + .canonicalize() + .expect("local lib path should canonicalize"), + ); + let doc = Document::new( + "local lib = import \"lib.jsonnet\"; lib".to_string(), + DocVersion::new(1), + ); + let import_resolution = ImportResolution::new(&importer, &[]); + + assert_eq!( + import_resolution.parse_entries(&doc), + vec![ImportEntry { + kind: crate::graph::ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: None, + resolved_path: Some(resolved_lib), + }] + ); + } + + #[test] + fn test_import_resolution_parse_occurrences() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + let code = "local lib = import \"lib.jsonnet\"; lib"; + fs::write(&importer, code).expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let resolved_lib = CanonicalPath::new( + 
local_lib + .canonicalize() + .expect("local lib path should canonicalize"), + ); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let import_resolution = ImportResolution::new(&importer, &[]); + + let start = u32::try_from( + code.find("\"lib.jsonnet\"") + .expect("import path should exist in source"), + ) + .expect("start offset should fit into u32"); + let end = start + + u32::try_from("\"lib.jsonnet\"".len()) + .expect("import path length should fit into u32"); + + assert_eq!( + import_resolution.parse_occurrences(&doc), + vec![ImportOccurrence { + entry: ImportEntry { + kind: crate::graph::ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_file: None, + resolved_path: Some(resolved_lib), + }, + import_range: rowan::TextRange::new(start.into(), end.into()), + }] + ); + } +} diff --git a/crates/jrsonnet-lsp-import/src/resolve/path_resolution.rs b/crates/jrsonnet-lsp-import/src/resolve/path_resolution.rs new file mode 100644 index 00000000..fce13929 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/resolve/path_resolution.rs @@ -0,0 +1,113 @@ +use std::path::Path; + +use jrsonnet_lsp_document::CanonicalPath; + +#[must_use] +pub(super) fn resolve_import_path( + importer_file: &CanonicalPath, + import_path: &str, + import_roots: &[std::path::PathBuf], +) -> Option { + resolve_import_path_from_base(importer_file.as_path(), import_path, import_roots) +} + +#[must_use] +fn resolve_import_path_from_base( + base_file: &Path, + import_path: &str, + import_roots: &[std::path::PathBuf], +) -> Option { + let import = Path::new(import_path); + + if import.is_absolute() { + return canonical_if_exists(import); + } + + if let Some(parent) = base_file.parent() { + let relative = parent.join(import); + if let Some(canonical) = canonical_if_exists(&relative) { + return Some(canonical); + } + } + + for root in import_roots { + let candidate = root.join(import); + if let Some(canonical) = 
canonical_if_exists(&candidate) { + return Some(canonical); + } + } + + None +} + +fn canonical_if_exists(path: &Path) -> Option { + path.canonicalize().ok().map(CanonicalPath::new) +} + +#[cfg(test)] +mod tests { + use std::fs; + + use tempfile::TempDir; + + use super::*; + use crate::resolve::ImportResolution; + + #[test] + fn test_resolve_relative_first() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + fs::write(&importer, "import \"lib.jsonnet\"").expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let import_resolution = ImportResolution::new(&importer, &[]); + let resolved = import_resolution.resolve("lib.jsonnet"); + assert_eq!( + resolved, + Some(CanonicalPath::new( + local_lib + .canonicalize() + .expect("local lib path should canonicalize") + )) + ); + } + + #[test] + fn test_resolve_from_import_roots() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer_dir = root.join("app"); + let jpath_dir = root.join("vendor"); + fs::create_dir_all(&importer_dir).expect("importer directory should be created"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + + let importer = importer_dir.join("main.jsonnet"); + let shared_lib = jpath_dir.join("shared.libsonnet"); + fs::write(&importer, "import \"shared.libsonnet\"").expect("importer should be written"); + fs::write(&shared_lib, "{}").expect("shared lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let import_roots = vec![jpath_dir]; + let import_resolution = ImportResolution::new(&importer, &import_roots); + let resolved = 
import_resolution.resolve("shared.libsonnet"); + assert_eq!( + resolved, + Some(CanonicalPath::new( + shared_lib + .canonicalize() + .expect("shared lib path should canonicalize") + )) + ); + } +} diff --git a/crates/jrsonnet-lsp-import/src/work_queue.rs b/crates/jrsonnet-lsp-import/src/work_queue.rs new file mode 100644 index 00000000..86a9db1a --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/work_queue.rs @@ -0,0 +1,398 @@ +//! Work queue for topological processing with per-level parallelization. +//! +//! This module provides a work queue that processes items in topological order, +//! grouping items by dependency level. Items at the same level have no dependencies +//! on each other and can be processed in parallel. +//! +//! # Pattern +//! +//! 1. Push initial work items +//! 2. Process items via visitor pattern - visitor can push dependencies +//! 3. Items are grouped by dependency level (leaves at level 0) +//! 4. Caller processes each level: parallel within level, sequential between levels +//! +//! # Example +//! +//! ```ignore +//! let mut work = WorkQueue::new(); +//! work.push(root_item); +//! +//! let levels = work.run(|item, queue| { +//! // Process item, push dependencies +//! for dep in get_dependencies(item) { +//! queue.push(dep); +//! } +//! }); +//! +//! // Process levels with rayon +//! for level in levels { +//! level.into_par_iter().for_each(|item| process(item)); +//! } +//! ``` + +use std::hash::Hash; + +use rustc_hash::{FxHashMap, FxHashSet}; + +/// Action in the work queue stack. +#[derive(Debug)] +enum Action { + /// Enter processing for this item (DFS pre-order). + Enter(T), + /// Exit processing for this item (DFS post-order). + Exit(T), +} + +/// Work queue for topological processing. +/// +/// Uses depth-first traversal to discover dependencies and group items by level. +/// Level is computed as: max(dependency levels) + 1, with leaves at level 0. +#[derive(Debug)] +pub struct WorkQueue { + /// Stack of actions (LIFO for DFS). 
+ actions: Vec>, + /// Computed levels for processed items. + item_levels: FxHashMap, + /// Track which items are currently being processed (for cycle detection). + in_progress: FxHashSet, + /// Track items already queued for processing. + queued: FxHashSet, +} + +impl Default for WorkQueue +where + T: Eq + Hash, +{ + fn default() -> Self { + Self::new() + } +} + +impl WorkQueue +where + T: Eq + Hash, +{ + /// Create a new empty work queue. + #[must_use] + pub fn new() -> Self { + Self { + actions: Vec::new(), + item_levels: FxHashMap::default(), + in_progress: FxHashSet::default(), + queued: FxHashSet::default(), + } + } + + /// Push an item to be processed. + /// + /// Items are deduplicated - pushing the same item twice has no effect. + pub fn push(&mut self, item: T) + where + T: Clone, + { + // Skip if already processed, in progress, or already queued. + if self.item_levels.contains_key(&item) + || self.in_progress.contains(&item) + || self.queued.contains(&item) + { + return; + } + + self.queued.insert(item.clone()); + self.actions.push(Action::Enter(item)); + } + + /// Get the computed level for an item (if already processed). + fn get_level(&self, item: &T) -> Option { + self.item_levels.get(item).copied() + } + + /// Run the work queue with a visitor function. + /// + /// The visitor is called for each item and can push dependencies via the queue. + /// Returns items grouped by dependency level - items at level 0 have no dependencies, + /// items at level 1 depend only on level 0 items, etc. + /// + /// # Arguments + /// + /// * `visitor` - Function called for each item. Receives the item and a mutable + /// reference to a collector for dependencies. + /// + /// # Returns + /// + /// Vector of levels, where each level is a vector of items that can be processed + /// in parallel. 
+ pub fn run(mut self, mut visitor: F) -> Vec> + where + T: Clone, + F: FnMut(&T, &mut Vec), + { + // Track dependencies for each item to compute levels + let mut item_deps: FxHashMap> = FxHashMap::default(); + + while let Some(action) = self.actions.pop() { + match action { + Action::Enter(item) => { + if self.item_levels.contains_key(&item) { + continue; // Already processed + } + self.queued.remove(&item); + + // Mark as in progress + self.in_progress.insert(item.clone()); + + // Schedule exit + self.actions.push(Action::Exit(item.clone())); + + // Collect dependencies + let mut deps = Vec::new(); + visitor(&item, &mut deps); + + // Push dependencies onto stack + for dep in &deps { + self.push(dep.clone()); + } + + // Store deps for level computation + item_deps.insert(item, deps); + } + Action::Exit(item) => { + self.in_progress.remove(&item); + + // Compute level: max(dep levels) + 1, or 0 if no deps + let deps = item_deps + .get(&item) + .map_or(&[][..], std::vec::Vec::as_slice); + let max_dep_level = deps + .iter() + .filter_map(|dep| self.get_level(dep)) + .max() + .unwrap_or(0); + + let level = if deps.is_empty() { + 0 + } else { + max_dep_level + 1 + }; + + self.item_levels.insert(item, level); + } + } + } + + // Group items by level + if self.item_levels.is_empty() { + return Vec::new(); + } + + let max_level = self.item_levels.values().max().copied().unwrap_or(0); + let mut levels: Vec> = vec![Vec::new(); max_level + 1]; + + for (item, level) in self.item_levels { + if let Some(items_at_level) = levels.get_mut(level) { + items_at_level.push(item); + } + } + + levels + } + + /// Check if an item has already been processed. + pub fn contains(&self, item: &T) -> bool { + self.item_levels.contains_key(item) + } +} + +/// Extension trait for processing work queue results with rayon. +pub trait WorkQueueExt { + /// Process each level in parallel using rayon. + /// + /// Items within each level are processed in parallel. 
+ /// Levels are processed sequentially (barrier between levels). + fn process_parallel(&self, f: F) + where + F: Fn(&T) + Sync, + T: Sync; + + /// Process each level in parallel with mutable accumulator. + /// + /// Each level is processed in parallel, then results are combined sequentially. + fn process_parallel_with(&self, init: A, process: F, combine: C) -> A + where + F: Fn(&T) -> A + Sync, + C: Fn(&mut A, A), + A: Send, + T: Sync; +} + +impl WorkQueueExt for Vec> { + fn process_parallel(&self, f: F) + where + F: Fn(&T) + Sync, + T: Sync, + { + use rayon::prelude::*; + + for level in self { + level.par_iter().for_each(&f); + } + } + + fn process_parallel_with(&self, mut acc: A, process: F, combine: C) -> A + where + F: Fn(&T) -> A + Sync, + C: Fn(&mut A, A), + A: Send, + T: Sync, + { + use rayon::prelude::*; + + for level in self { + let level_results: Vec = level.par_iter().map(&process).collect(); + for result in level_results { + combine(&mut acc, result); + } + } + + acc + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use super::*; + + #[test] + fn test_empty_queue() { + let work: WorkQueue = WorkQueue::new(); + let levels = work.run(|_, _| {}); + + assert_eq!(levels, Vec::>::new()); + } + + #[test] + fn test_single_item() { + let mut work = WorkQueue::new(); + work.push(1); + + let levels = work.run(|_, _| {}); + + assert_eq!(levels, vec![vec![1]]); + } + + #[test] + fn test_chain_dependency() { + // 1 depends on 2, 2 depends on 3 + // Expected levels: [3], [2], [1] + let deps: HashMap> = [(1, vec![2]), (2, vec![3]), (3, vec![])] + .into_iter() + .collect(); + + let mut work = WorkQueue::new(); + work.push(1); + + let levels = work.run(|item, dep_collector| { + if let Some(item_deps) = deps.get(item) { + dep_collector.extend(item_deps.iter().copied()); + } + }); + + assert_eq!(levels, vec![vec![3], vec![2], vec![1]]); + } + + #[test] + fn test_parallel_items() { + // 1 depends on 2 and 3 (2 and 3 are independent) + // Expected 
levels: [2, 3] (order may vary), [1] + let deps: HashMap> = [(1, vec![2, 3]), (2, vec![]), (3, vec![])] + .into_iter() + .collect(); + + let mut work = WorkQueue::new(); + work.push(1); + + let levels = work.run(|item, dep_collector| { + if let Some(item_deps) = deps.get(item) { + dep_collector.extend(item_deps.iter().copied()); + } + }); + + let normalized_levels: Vec> = levels + .into_iter() + .map(|mut level| { + level.sort_unstable(); + level + }) + .collect(); + assert_eq!(normalized_levels, vec![vec![2, 3], vec![1]]); + } + + #[test] + fn test_diamond_dependency() { + // 1 depends on 2 and 3, both 2 and 3 depend on 4 + // Expected: [4], [2, 3], [1] + let deps: HashMap> = + [(1, vec![2, 3]), (2, vec![4]), (3, vec![4]), (4, vec![])] + .into_iter() + .collect(); + + let mut work = WorkQueue::new(); + work.push(1); + + let levels = work.run(|item, dep_collector| { + if let Some(item_deps) = deps.get(item) { + dep_collector.extend(item_deps.iter().copied()); + } + }); + + let normalized_levels: Vec> = levels + .into_iter() + .map(|mut level| { + level.sort_unstable(); + level + }) + .collect(); + assert_eq!(normalized_levels, vec![vec![4], vec![2, 3], vec![1]]); + } + + #[test] + fn test_deduplication() { + // Push same item twice - should only appear once + let mut work = WorkQueue::new(); + work.push(1); + work.push(1); + work.push(2); + work.push(2); + + let levels = work.run(|_, _| {}); + + // Should have 2 items total, not 4 + let total: usize = levels.iter().map(std::vec::Vec::len).sum(); + assert_eq!(total, 2); + } + + #[test] + fn test_process_parallel() { + use std::sync::atomic::{AtomicUsize, Ordering}; + + let levels = vec![vec![1, 2, 3], vec![4, 5]]; + + let counter = AtomicUsize::new(0); + levels.process_parallel(|_| { + counter.fetch_add(1, Ordering::SeqCst); + }); + + assert_eq!(counter.load(Ordering::SeqCst), 5); + } + + #[test] + fn test_process_parallel_with() { + let levels = vec![vec![1, 2, 3], vec![4, 5]]; + + let sum = 
levels.process_parallel_with(0, |&x| x, |acc, x| *acc += x); + + assert_eq!(sum, 15); // 1 + 2 + 3 + 4 + 5 + } +} diff --git a/crates/jrsonnet-lsp-inference/Cargo.toml b/crates/jrsonnet-lsp-inference/Cargo.toml new file mode 100644 index 00000000..9a066256 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "jrsonnet-lsp-inference" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Type inference and flow analysis for jrsonnet LSP" + +[dependencies] +dashmap = "6" +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-lsp-import = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-import" } +jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } +jrsonnet-lsp-stdlib = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-stdlib" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +jrsonnet-std-sig = { version = "0.5.0-pre97", path = "../jrsonnet-std-sig" } +lru.workspace = true +lsp-types.workspace = true +moka = { version = "0.12", features = ["sync"] } +parking_lot.workspace = true +rayon = "1.11.0" +rowan.workspace = true +rustc-hash.workspace = true +strsim.workspace = true +tracing = "0.1.44" + +[dev-dependencies] +assert_matches = "1.5.0" +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +rstest = "0.23" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-inference/src/analysis/build.rs b/crates/jrsonnet-lsp-inference/src/analysis/build.rs new file mode 100644 index 00000000..caa6373e --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/analysis/build.rs @@ -0,0 +1,94 @@ +use std::sync::Arc; + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, 
MutStore, Ty, TySubst}; +use parking_lot::RwLock; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use super::TypeAnalysis; +use crate::{ + env::{ImportResolver, TypeEnv}, + expr::infer_expr_ty_and_record, +}; + +impl TypeAnalysis { + /// Analyze a document and return the type analysis results. + #[must_use] + pub fn analyze(document: &Document) -> Self { + Self::analyze_with_global(document, Arc::new(GlobalTyStore::new())) + } + + /// Analyze a document with a shared global store. + pub fn analyze_with_global(document: &Document, global: Arc) -> Self { + let mut env = TypeEnv::new(Arc::clone(&global)); + let ast = document.ast(); + + let mut expr_types = FxHashMap::default(); + + let doc_ty = ast.expr().map_or(Ty::ANY, |expr| { + analyze_and_record(&expr, &mut env, &mut expr_types) + }); + + Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) + } + + /// Analyze a document with a shared global store and import resolver. + pub fn analyze_with_resolver( + document: &Document, + global: Arc, + import_resolver: Arc, + ) -> Self { + let mut env = TypeEnv::with_import_resolver(Arc::clone(&global), import_resolver); + let ast = document.ast(); + + let mut expr_types = FxHashMap::default(); + + let doc_ty = ast.expr().map_or(Ty::ANY, |expr| { + analyze_and_record(&expr, &mut env, &mut expr_types) + }); + + Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) + } + + fn finalize_analysis( + global: Arc, + store: MutStore, + mut expr_types: FxHashMap, + document_type: Ty, + ) -> Self { + let local = store.into_local(); + let subst = TySubst::merge(global.as_ref(), &local); + + let map_ty = |ty: Ty| { + let mapped = subst.apply(ty); + if mapped.is_local() { + Ty::ANY + } else { + mapped + } + }; + + for ty in expr_types.values_mut() { + *ty = map_ty(*ty); + } + + let document_type = map_ty(document_type); + let document_type = GlobalTy::new(document_type).unwrap_or(GlobalTy::ANY); + + Self { + store: RwLock::new(MutStore::new(global)), 
+ expr_types, + document_type, + } + } +} + +/// Analyze an expression and record types for it and all sub-expressions. +fn analyze_and_record( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expr_types: &mut FxHashMap, +) -> Ty { + infer_expr_ty_and_record(expr, env, None, expr_types) +} diff --git a/crates/jrsonnet-lsp-inference/src/analysis/mod.rs b/crates/jrsonnet-lsp-inference/src/analysis/mod.rs new file mode 100644 index 00000000..73e81615 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/analysis/mod.rs @@ -0,0 +1,82 @@ +//! Type analysis storage and queries. +//! +//! Provides eager type analysis for Jsonnet documents. Analysis is computed +//! once during construction and results are immutable, making `TypeAnalysis` +//! safe to share across threads and cache in concurrent data structures. + +mod build; +mod queries; + +use std::sync::Arc; + +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, MutStore, Ty}; +use parking_lot::RwLock; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +/// Stores inferred types for all expressions, allowing queries by position. +/// +/// Internally uses interned `Ty` references for memory efficiency. +/// Analysis is computed eagerly during construction. +/// +/// This type is `Send + Sync` and can be safely shared across threads +/// and cached in concurrent data structures like moka. +pub struct TypeAnalysis { + /// Type store for interning and looking up types. + /// Uses `RwLock` because some query operations (like union) may intern new types. + store: RwLock, + /// Map from expression text range to interned type. + /// Immutable after construction. + expr_types: FxHashMap, + /// The inferred type of the document's root expression. 
+ document_type: GlobalTy, +} + +impl std::fmt::Debug for TypeAnalysis { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TypeAnalysis") + .field("store", &">") + .field("expr_types_count", &self.expr_types.len()) + .field("document_type", &self.document_type) + .finish() + } +} + +impl Default for TypeAnalysis { + fn default() -> Self { + Self::new() + } +} + +impl TypeAnalysis { + /// Create a new empty type analysis with a default global store. + #[must_use] + pub fn new() -> Self { + Self { + store: RwLock::new(MutStore::new(Arc::new(GlobalTyStore::new()))), + expr_types: FxHashMap::default(), + document_type: GlobalTy::ANY, + } + } + + /// Create a new empty type analysis with a specific global store. + pub fn with_global(global: Arc) -> Self { + Self { + store: RwLock::new(MutStore::new(global)), + expr_types: FxHashMap::default(), + document_type: GlobalTy::ANY, + } + } + + /// Get the inferred type of the document's root expression. + #[inline] + pub fn document_type(&self) -> Ty { + self.document_type.into() + } + + /// Get the inferred type of the document's root expression as a global type. + #[inline] + pub fn document_type_global(&self) -> GlobalTy { + self.document_type + } +} diff --git a/crates/jrsonnet-lsp-inference/src/analysis/queries.rs b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs new file mode 100644 index 00000000..f7a506d9 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs @@ -0,0 +1,792 @@ +use jrsonnet_lsp_types::{ + is_subtype_ty, DisplayContext, FunctionData, MutStore, ObjectData, Ty, TyData, +}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, FieldName}, + AstNode, SyntaxNode, SyntaxToken, +}; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use super::TypeAnalysis; + +impl TypeAnalysis { + /// Get the type of an expression at a specific position. + /// + /// Finds the smallest expression containing the position and returns its type. 
+ pub fn type_at_position(&self, root: &SyntaxNode, offset: rowan::TextSize) -> Option { + self.find_type_at_in_syntax(root, offset) + .or_else(|| self.find_type_at(offset)) + } + + /// Find the smallest expression type containing `offset` by walking syntax ancestors. + fn find_type_at_in_syntax(&self, root: &SyntaxNode, offset: rowan::TextSize) -> Option { + let update_best = |best: &mut Option<(TextRange, Ty)>, range: TextRange, ty: Ty| match best + { + None => *best = Some((range, ty)), + Some((best_range, _)) if range.len() < best_range.len() => { + *best = Some((range, ty)); + } + _ => {} + }; + + let candidate_for_token = |token: SyntaxToken| { + let mut best: Option<(TextRange, Ty)> = self.contextual_type_for_token(&token); + + if let Some(ty) = self.expr_types.get(&token.text_range()).copied() { + update_best(&mut best, token.text_range(), ty); + } + + let mut current = token.parent(); + while let Some(node) = current { + let range = node.text_range(); + if let Some(ty) = self.expr_types.get(&range).copied() { + update_best(&mut best, range, ty); + } + current = node.parent(); + } + + best + }; + + let best = match root.token_at_offset(offset) { + rowan::TokenAtOffset::None => None, + rowan::TokenAtOffset::Single(token) => candidate_for_token(token), + rowan::TokenAtOffset::Between(left, right) => { + match (candidate_for_token(left), candidate_for_token(right)) { + (None, None) => None, + (Some(candidate), None) | (None, Some(candidate)) => Some(candidate), + (Some(left), Some(right)) => Some(if left.0.len() <= right.0.len() { + left + } else { + right + }), + } + } + }; + + best.map(|(_, ty)| ty) + } + + /// Infer contextual type for non-expression nodes that users still hover. 
+ /// + /// Examples: + /// - Object field names (`a: 1`, `"a": 1`, `[expr]: 1`) + /// - Local binding definition names (`local x = value`) + fn contextual_type_for_token(&self, token: &SyntaxToken) -> Option<(TextRange, Ty)> { + let node = token.parent()?; + let token_range = token.text_range(); + + if let Some(field_name) = node.ancestors().find_map(FieldName::cast) { + return match field_name { + FieldName::FieldNameFixed(fixed) => Some((fixed.syntax().text_range(), Ty::STRING)), + FieldName::FieldNameDynamic(dynamic) => { + let expr = dynamic.expr()?; + let ty = self.type_for_range(expr.syntax().text_range())?; + Some((dynamic.syntax().text_range(), ty)) + } + }; + } + + let bind = node.ancestors().find_map(Bind::cast)?; + match bind { + Bind::BindDestruct(bind_destruct) => { + let value = bind_destruct.value()?; + let value_range = value.syntax().text_range(); + if value_range.start() <= token_range.start() + && token_range.end() <= value_range.end() + { + return None; + } + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let name_range = full.name()?.syntax().text_range(); + let assign_range = bind_destruct.assign_token().map(|token| token.text_range()); + if token_range != name_range && assign_range != Some(token_range) { + return None; + } + let ty = self.type_for_range(value_range)?; + Some((value_range, ty)) + } + Bind::BindFunction(_) => None, + } + } + + /// Find a type at the given offset. + fn find_type_at(&self, offset: rowan::TextSize) -> Option { + let mut best_range: Option = None; + + for range in self.expr_types.keys() { + if range.contains(offset) { + match best_range { + None => best_range = Some(*range), + Some(current) if range.len() < current.len() => best_range = Some(*range), + _ => {} + } + } + } + + best_range.and_then(|r| self.expr_types.get(&r).copied()) + } + + /// Get the type of an expression by its exact range. 
+ pub fn type_for_range(&self, range: TextRange) -> Option { + self.expr_types.get(&range).copied() + } + + /// Display a Ty as a string. + pub fn display(&self, ty: Ty) -> String { + self.store.read().display(ty) + } + + /// Display a Ty in detailed form suitable for hover text. + pub fn display_for_hover(&self, ty: Ty) -> String { + let store = self.store.read(); + let rendered = { + let mut context = DisplayContext::detailed(&*store); + // Keep hover output readable for very large nested types. + context.max_depth = 6; + context.max_items = 8; + context.max_union_members = 8; + context.format(ty) + }; + drop(store); + rendered + } + + /// Check if `subtype` is a subtype of `supertype`. + pub fn is_subtype(&self, subtype: Ty, supertype: Ty) -> bool { + is_subtype_ty(&*self.store.read(), subtype, supertype) + } + + /// Access the type data for a Ty via closure (avoids cloning). + pub fn with_data(&self, ty: Ty, f: impl FnOnce(&TyData) -> R) -> R { + f(&self.store.read().get(ty)) + } + + /// Create a union type from multiple Ty values. + pub fn union(&self, types: Vec) -> Ty { + self.store.write().union(types) + } + + /// Get all known fields for an object type at a position. + /// + /// Returns field names and their interned types, useful for completion. + pub fn fields_at_position( + &self, + root: &SyntaxNode, + offset: rowan::TextSize, + ) -> Option> { + let ty = self.type_at_position(root, offset)?; + self.extract_fields(ty) + } + + /// Get all known fields for the expression that exactly matches `range`. + /// + /// This is useful when callers have already identified a concrete expression + /// node and want to avoid position-based ambiguity. + pub fn fields_for_range(&self, range: TextRange) -> Option> { + let ty = self.type_for_range(range)?; + self.extract_fields(ty) + } + + /// Extract fields from a type (handles unions by merging fields). 
+ fn extract_fields(&self, ty: Ty) -> Option> { + let ty_data = self.store.read().get(ty); + + match ty_data { + TyData::Object(obj) => { + let fields: Vec<_> = obj.fields.iter().map(|(k, v)| (k.clone(), v.ty)).collect(); + if fields.is_empty() && !obj.has_unknown { + None + } else { + Some(fields) + } + } + TyData::Union(types) => { + // Merge fields from all object types in the union + let mut all_fields: FxHashMap> = FxHashMap::default(); + for t in types { + if let Some(fields) = self.extract_fields(t) { + for (name, field_ty) in fields { + all_fields.entry(name).or_default().push(field_ty); + } + } + } + if all_fields.is_empty() { + None + } else { + // Create union types for fields that have multiple types + let result: Vec<_> = all_fields + .into_iter() + .map(|(name, tys)| { + let unified_ty = self.store.write().union(tys); + (name, unified_ty) + }) + .collect(); + Some(result) + } + } + _ => None, + } + } + + /// Check if a type supports indexing (array, object, string, tuple). + #[inline] + pub fn is_indexable(&self, ty: Ty) -> bool { + self.store.read().is_indexable(ty) + } + + /// Check if a type supports field access (object or `attrs_of`). + #[inline] + pub fn supports_field_access(&self, ty: Ty) -> bool { + self.store.read().supports_field_access(ty) + } + + /// Check if a type is callable (function). + #[inline] + pub fn is_callable(&self, ty: Ty) -> bool { + self.store.read().is_callable(ty) + } + + /// Access the type store directly (for complex operations). + pub fn store(&self) -> parking_lot::RwLockReadGuard<'_, MutStore> { + self.store.read() + } + + /// Execute a closure with mutable access to the type store. + pub fn with_store_mut(&self, f: impl FnOnce(&mut MutStore) -> R) -> R { + f(&mut self.store.write()) + } + + /// Execute a function with immutable access to the `MutStore`. + pub fn with_store(&self, f: impl FnOnce(&MutStore) -> R) -> R { + f(&self.store.read()) + } + + /// Import a type from the stdlib store into this analysis store. 
+ pub fn import_from_stdlib(&self, ty: Ty) -> Ty { + jrsonnet_lsp_stdlib::import_ty_from_stdlib(&mut self.store.write(), ty) + } + + /// Get the type data for a Ty, cloning it for pattern matching. + #[inline] + pub fn get_data(&self, ty: Ty) -> TyData { + self.store.read().get(ty) + } + + /// Get object data if the type is an Object. + pub fn get_object(&self, ty: Ty) -> Option { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Object(obj) => Some(obj), + _ => None, + } + } + + /// Get tuple element types if the type is a Tuple. + pub fn get_tuple(&self, ty: Ty) -> Option> { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Tuple { elems } => Some(elems), + _ => None, + } + } + + /// Get function data if the type is a Function. + pub fn get_function(&self, ty: Ty) -> Option { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Function(func) => Some(func), + _ => None, + } + } + + /// Check if a type is an object and has known (closed) fields. + #[inline] + pub fn is_closed_object(&self, ty: Ty) -> bool { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Object(obj) => !obj.has_unknown, + _ => false, + } + } + + /// Get the length of a tuple type. + #[inline] + pub fn tuple_len(&self, ty: Ty) -> Option { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Tuple { elems } => Some(elems.len()), + _ => None, + } + } + + /// Check if an object type has a specific field. + pub fn object_has_field(&self, ty: Ty, field_name: &str) -> Option { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Object(obj) => Some(obj.fields.iter().any(|(name, _)| name == field_name)), + _ => None, + } + } + + /// Get all field names from an object type. 
+ pub fn object_field_names(&self, ty: Ty) -> Option> { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Object(obj) => Some(obj.fields.iter().map(|(name, _)| name.clone()).collect()), + _ => None, + } + } + + /// Check if a type supports slicing (array, tuple, or string). + #[inline] + pub fn is_sliceable(&self, ty: Ty) -> bool { + if ty.is_any() { + return true; + } + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Array { .. } + | TyData::Tuple { .. } + | TyData::String + | TyData::Char + | TyData::LiteralString(_) => true, + TyData::Union(types) => types.iter().all(|&t| self.is_sliceable(t)), + _ => false, + } + } + + /// Check if a type is an array type. + #[inline] + pub fn is_array(&self, ty: Ty) -> bool { + let ty_data = self.store.read().get(ty); + matches!(ty_data, TyData::Array { .. }) + } + + /// Check if a type is a string type (including Char and `LiteralString`). + #[inline] + pub fn is_string(&self, ty: Ty) -> bool { + let ty_data = self.store.read().get(ty); + matches!( + ty_data, + TyData::String | TyData::Char | TyData::LiteralString(_) + ) + } + + /// Check if a type is a function. 
+ #[inline] + pub fn is_function(&self, ty: Ty) -> bool { + let ty_data = self.store.read().get(ty); + matches!(ty_data, TyData::Function(_)) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; + use jrsonnet_rowan_parser::AstNode; + + use super::*; + use crate::ImportResolver; + + fn analyze_doc(code: &str) -> (TypeAnalysis, Document) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze(&doc); + (analysis, doc) + } + + #[derive(Debug)] + struct NoopImportResolver; + + impl ImportResolver for NoopImportResolver { + fn resolve_import(&self, _import_path: &str) -> Option { + None + } + } + + fn analyze_doc_with_resolver(code: &str) -> (TypeAnalysis, Document) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze_with_resolver( + &doc, + Arc::new(GlobalTyStore::new()), + Arc::new(NoopImportResolver), + ); + (analysis, doc) + } + + #[test] + fn test_type_at_position_finds_smallest() { + let code = "{ a: 1 + 2 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + // Position 5 is inside "1" + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(5)) + .expect("should find type at position 5"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_type_at_position_nested_expressions() { + let code = "[[1]]"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + // Position 2 is at the inner "1" + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(2)) + .expect("should find type at position 2"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_type_at_position_field_name_fixed_is_string() { + let code = "{ key: 1 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let key_offset = nth_offset(code, "key", 0); + 
let ty = analysis + .type_at_position(&root, key_offset) + .expect("should find type at fixed field name"); + assert_eq!(ty, Ty::STRING); + } + + #[test] + fn test_type_at_position_field_name_dynamic_uses_expr_type() { + let code = r#"{ [("x" + "y")]: 1 }"#; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let l_bracket_offset = nth_offset(code, "[", 0); + let ty = analysis + .type_at_position(&root, l_bracket_offset) + .expect("should find type at computed field name"); + assert_eq!(ty, Ty::STRING); + } + + #[test] + fn test_type_at_position_object_local_bind_name_uses_value_type() { + let code = r"{ local x = { a: 1 }, y: x }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let bind_name_offset = nth_offset(code, "x = {", 0); + let ty = analysis + .type_at_position(&root, bind_name_offset) + .expect("should find type at object-local bind name"); + + analysis.with_data(ty, |data| { + assert!( + matches!(data, TyData::Object(_)), + "expected object type for bind name, got {:?}", + data + ); + let TyData::Object(obj) = data else { + return; + }; + assert!(!obj.has_unknown); + assert_eq!(obj.fields.len(), 1); + assert_eq!(obj.fields[0].0, "a"); + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); + }); + } + + #[test] + fn test_type_at_position_local_array_destruct_bind_name_uses_element_type() { + let code = "local [a, b] = [1, 2]; a + b"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let bind_name_offset = nth_offset(code, "a", 0); + let ty = analysis + .type_at_position(&root, bind_name_offset) + .expect("should find type at array-destruct bind name"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_type_at_position_local_object_destruct_bind_name_uses_field_type() { + let code = "local { foo: x } = { foo: 1 }; x"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let bind_name_offset = nth_offset(code, 
"x", 0); + let ty = analysis + .type_at_position(&root, bind_name_offset) + .expect("should find type at object-destruct bind name"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_type_at_position_object_local_bind_equals_uses_value_type() { + let code = r"{ local x = { a: 1 }, y: x }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let equals_offset = nth_offset(code, "=", 0); + let ty = analysis + .type_at_position(&root, equals_offset) + .expect("should find type at object-local bind equals token"); + + analysis.with_data(ty, |data| { + assert!( + matches!(data, TyData::Object(_)), + "expected object type for bind equals, got {:?}", + data + ); + let TyData::Object(obj) = data else { + return; + }; + assert!(!obj.has_unknown); + assert_eq!(obj.fields.len(), 1); + assert_eq!(obj.fields[0].0, "a"); + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); + }); + } + + #[test] + fn test_type_at_position_function_sugar_param_is_parameter_type() { + let code = "local f(x) = x + 1; f"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let param_offset = nth_offset(code, "x)", 0); + let ty = analysis + .type_at_position(&root, param_offset) + .expect("should find type at function-sugar parameter token"); + + assert_eq!(ty, Ty::ANY); + } + + #[test] + fn test_type_at_position_function_sugar_equals_is_function_type() { + let code = "local f(x) = x + 1; f"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let equals_offset = nth_offset(code, "=", 0); + let ty = analysis + .type_at_position(&root, equals_offset) + .expect("should find type at function-sugar equals token"); + + analysis.with_data(ty, |data| { + assert!( + matches!(data, TyData::Function(_)), + "expected function type for bind equals, got {:?}", + data + ); + let TyData::Function(func) = data else { + return; + }; + assert_eq!(func.params.len(), 1); + assert_eq!(func.params[0].name, "x"); + 
assert_eq!(func.params[0].ty, Ty::ANY); + assert_eq!( + func.return_spec, + jrsonnet_lsp_types::ReturnSpec::Fixed(Ty::NUMBER) + ); + }); + } + + #[test] + fn test_type_at_position_function_sugar_default_param_reflects_default_type() { + let code = "local f(x = 1) = x; f"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let param_offset = nth_offset(code, "x =", 0); + let ty = analysis + .type_at_position(&root, param_offset) + .expect("should find type at defaulted function-sugar parameter"); + + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_type_at_position_function_sugar_param_multiline_is_parameter_type() { + let code = "local f(x) = x + 1;\nf"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let param_offset = nth_offset(code, "x) =", 0); + let ty = analysis + .type_at_position(&root, param_offset) + .expect("should find type at multiline function-sugar parameter token"); + + assert_eq!(ty, Ty::ANY); + } + + #[test] + fn test_type_at_position_function_sugar_with_resolver_keeps_parameter_type() { + let code = "local f(x) = x + 1;\nf"; + let (analysis, doc) = analyze_doc_with_resolver(code); + let root = doc.ast().syntax().clone(); + + let param_offset = nth_offset(code, "x) =", 0); + let ty = analysis + .type_at_position(&root, param_offset) + .expect("should find type at function-sugar parameter token with resolver"); + + assert_eq!(ty, Ty::ANY); + } + + #[test] + fn test_fields_at_position_object() { + let code = "{ a: 1, b: 2 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let fields = analysis + .fields_at_position(&root, rowan::TextSize::new(0)) + .expect("should find fields at position 0"); + let mut names: Vec<_> = fields.iter().map(|(n, _)| n.as_str()).collect(); + names.sort_unstable(); + assert_eq!(names, vec!["a", "b"]); + } + + #[test] + fn test_extract_fields_union_merges() { + let code = "if true then { a: 1 } else { 
b: 2 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .expect("should find type at position 0"); + + let fields = analysis + .extract_fields(ty) + .expect("should extract fields from union type"); + let mut names: Vec<_> = fields.iter().map(|(n, _)| n.as_str()).collect(); + names.sort_unstable(); + assert_eq!(names, vec!["a", "b"]); + } + + #[test] + fn test_is_closed_object() { + let code = "{ a: 1 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .unwrap(); + assert!(analysis.is_closed_object(ty)); + } + + fn nth_offset(code: &str, needle: &str, occurrence: usize) -> rowan::TextSize { + let idx = code + .match_indices(needle) + .nth(occurrence) + .map(|(idx, _)| idx) + .expect("expected needle occurrence in source"); + rowan::TextSize::new(u32::try_from(idx).expect("offset should fit into u32")) + } + + #[test] + fn test_type_at_position_uses_flow_narrowing_in_else_branch() { + let code = r#"local f(x) = + assert std.isNumber(x) || std.isString(x); + if std.isNumber(x) then + x + 1 + else + std.length(x); +f(3) + f("hi")"#; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + let call_start = nth_offset(code, "std.length(x)", 0); + let x_offset = call_start + rowan::TextSize::new(11); + + let ty = analysis + .type_at_position(&root, x_offset) + .expect("expected type at else branch x"); + assert_eq!(ty, Ty::STRING); + } + + #[test] + fn test_type_at_position_uses_flow_narrowing_for_non_null_branch() { + let code = r"local f(x) = + assert x == null || std.isString(x); + if x != null then + std.length(x) + else + 0; +f(null)"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + let call_start = nth_offset(code, "std.length(x)", 0); + let x_offset = call_start + 
rowan::TextSize::new(11); + + let ty = analysis + .type_at_position(&root, x_offset) + .expect("expected type at non-null branch x"); + assert_eq!(ty, Ty::STRING); + } + + #[test] + fn test_object_has_field() { + let code = "{ existing: 1 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .unwrap(); + assert_eq!(analysis.object_has_field(ty, "existing"), Some(true)); + assert_eq!(analysis.object_has_field(ty, "nonexistent"), Some(false)); + } + + #[test] + fn test_object_field_names() { + let code = "{ x: 1, y: 2, z: 3 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .expect("should find type at position 0"); + let mut names = analysis + .object_field_names(ty) + .expect("should get field names"); + names.sort(); + assert_eq!( + names, + vec!["x".to_string(), "y".to_string(), "z".to_string()] + ); + } + + #[test] + fn test_is_predicates() { + let (analysis, doc) = analyze_doc("[1, 2, 3]"); + let root = doc.ast().syntax().clone(); + let array_ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .unwrap(); + + assert!(analysis.is_indexable(array_ty)); + assert!(analysis.is_sliceable(array_ty)); + assert!(!analysis.is_callable(array_ty)); + assert!(!analysis.supports_field_access(array_ty)); + } + + #[test] + fn test_document_type() { + let (analysis, _doc) = analyze_doc("42"); + assert_eq!(analysis.document_type(), Ty::NUMBER); + } + + #[test] + fn test_document_type_object() { + let (analysis, _doc) = analyze_doc("{ a: 1 }"); + assert!(analysis.is_closed_object(analysis.document_type())); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/const_eval.rs b/crates/jrsonnet-lsp-inference/src/const_eval.rs new file mode 100644 index 00000000..f75b544b --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/const_eval.rs @@ -0,0 
+1,629 @@ +//! Const-eval: trace expressions through bindings without full evaluation. +//! +//! This module provides static analysis to trace expressions through local bindings +//! and field accesses. This enables go-to-definition to work for patterns like: +//! +//! ```jsonnet +//! local x = import "lib.libsonnet"; +//! local y = x; +//! y.foo.bar // go-to-def on 'bar' should trace through y -> x -> import -> foo -> bar +//! ``` + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_import::extract_import_path; +use jrsonnet_lsp_scope::{ + find_definition_range, ident_resolves_to_builtin_std, var_resolves_to_builtin_std, +}; +use jrsonnet_rowan_parser::{ + nodes::{ + BindDestruct, Expr, ExprBase, ExprField, ExprImport, ExprIndex, ExprVar, FieldName, Member, + Name, ObjBody, ObjBodyMemberList, + }, + AstNode, AstToken, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use rowan::TextRange; +use rustc_hash::FxHashSet; + +/// Result of const-eval tracing. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ConstEvalResult { + /// Traced to an import path with optional field chain. + Import { + /// The import path. + path: String, + /// Field names to navigate after import (e.g., `["foo", "bar"]` for import.foo.bar). + fields: Vec, + }, + /// Traced to a local expression (couldn't trace further). + Local { + /// The text range of the expression. + range: TextRange, + }, + /// Traced to std library, optionally with a field. + Std { + /// The field name if accessing a std field (e.g., "length" for std.length). + field: Option, + }, +} + +/// Context for const-eval to prevent infinite loops. +struct EvalContext { + /// Set of (expression range) pairs we've already visited. + seen: FxHashSet, + /// Maximum depth to prevent runaway recursion. + max_depth: usize, + /// Current depth. 
+ depth: usize, +} + +impl EvalContext { + fn new() -> Self { + Self { + seen: FxHashSet::default(), + max_depth: 50, + depth: 0, + } + } + + /// Check if we should continue (not seen, not too deep). + fn enter(&mut self, range: TextRange) -> bool { + if self.depth >= self.max_depth { + return false; + } + if !self.seen.insert(range) { + return false; + } + self.depth += 1; + true + } + + fn exit(&mut self) { + self.depth = self.depth.saturating_sub(1); + } +} + +/// Trace an expression to find what it evaluates to statically. +/// +/// This doesn't do full evaluation - it traces through: +/// - Variable references to their definitions +/// - Field accesses on objects +/// - Import expressions +/// - Local bindings +#[must_use] +pub fn trace_expr(expr: &Expr, document: &Document) -> Option { + let mut ctx = EvalContext::new(); + trace_expr_inner(expr, document, &mut ctx) +} + +/// Trace from an `ExprBase`. +#[must_use] +pub fn trace_base(base: &ExprBase, document: &Document) -> Option { + let mut ctx = EvalContext::new(); + trace_base_inner(base, document, &mut ctx) +} + +/// Trace from an identifier token to what it refers to. 
+#[must_use] +pub fn trace_ident(token: &SyntaxToken, document: &Document) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut ctx = EvalContext::new(); + if ident_resolves_to_builtin_std(token) { + return Some(ConstEvalResult::Std { field: None }); + } + let name = token.text(); + + // Find the definition of this variable + let def_range = find_definition_range(token, name)?; + + if !ctx.enter(def_range) { + return None; + } + + // Find the bind at that definition site and trace its value + let result = trace_definition_value(def_range, document, &mut ctx); + ctx.exit(); + result +} + +fn trace_expr_inner( + expr: &Expr, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + let range = expr.syntax().text_range(); + if !ctx.enter(range) { + return None; + } + + // First get the base expression + let base = expr.expr_base()?; + let result = trace_base_inner(&base, document, ctx); + + ctx.exit(); + result +} + +fn trace_base_inner( + base: &ExprBase, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + match base { + ExprBase::ExprVar(var) => trace_var(var, document, ctx), + ExprBase::ExprImport(import) => trace_import(import), + ExprBase::ExprField(field) => trace_field(field, document, ctx), + ExprBase::ExprIndex(index) => trace_index(index, document, ctx), + ExprBase::ExprParened(parens) => { + let inner = parens.expr()?; + trace_expr_inner(&inner, document, ctx) + } + // For other expressions, we can't trace further statically + _ => Some(ConstEvalResult::Local { + range: base.syntax().text_range(), + }), + } +} + +/// Trace a variable reference. 
+fn trace_var(var: &ExprVar, document: &Document, ctx: &mut EvalContext) -> Option { + if var_resolves_to_builtin_std(var) { + return Some(ConstEvalResult::Std { field: None }); + } + let name_node = var.name()?; + let ident = name_node.ident_lit()?; + let name = ident.text(); + + // Find the definition + let def_range = find_definition_range(&ident, name)?; + + if !ctx.enter(def_range) { + return Some(ConstEvalResult::Local { range: def_range }); + } + + let result = trace_definition_value(def_range, document, ctx); + ctx.exit(); + + result.or(Some(ConstEvalResult::Local { range: def_range })) +} + +/// Trace an import expression. +fn trace_import(import: &ExprImport) -> Option { + let path = extract_import_path(import)?; + Some(ConstEvalResult::Import { + path, + fields: vec![], + }) +} + +/// Trace a field access (e.g., `x.foo`). +fn trace_field( + field: &ExprField, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + // Get the field name + let field_name_node = field.field()?; + let field_name = extract_field_name(&field_name_node)?; + + // Get the base expression + let base_expr = field.base()?; + let base = base_expr.expr_base()?; + + // Trace the base + let base_result = trace_base_inner(&base, document, ctx)?; + + // Append the field to the result + match base_result { + ConstEvalResult::Import { path, mut fields } => { + fields.push(field_name); + Some(ConstEvalResult::Import { path, fields }) + } + ConstEvalResult::Std { field: None } => Some(ConstEvalResult::Std { + field: Some(field_name), + }), + ConstEvalResult::Std { field: Some(_) } => { + // std.foo.bar - can't trace further into std fields + None + } + ConstEvalResult::Local { range } => { + // Try to find the field in the local object + let ast = document.ast(); + trace_field_in_object(range, &field_name, ast.syntax(), document, ctx) + } + } +} + +/// Trace an index access (e.g., `x["foo"]`). 
+fn trace_index( + index: &ExprIndex, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + // Get the index expression - must be a string literal for us to trace + let index_expr = index.index()?; + let field_name = extract_string_literal(&index_expr)?; + + // Get the base expression + let base_expr = index.base()?; + let base = base_expr.expr_base()?; + + // Trace the base + let base_result = trace_base_inner(&base, document, ctx)?; + + // Append the field to the result + match base_result { + ConstEvalResult::Import { path, mut fields } => { + fields.push(field_name); + Some(ConstEvalResult::Import { path, fields }) + } + ConstEvalResult::Std { field: None } => Some(ConstEvalResult::Std { + field: Some(field_name), + }), + ConstEvalResult::Std { field: Some(_) } => None, + ConstEvalResult::Local { range } => { + let ast = document.ast(); + trace_field_in_object(range, &field_name, ast.syntax(), document, ctx) + } + } +} + +/// Find the value bound at a definition site and trace it. +fn trace_definition_value( + def_range: TextRange, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + let ast = document.ast(); + let root = ast.syntax(); + + // Find the node at the definition range + for node in root.descendants() { + if node.text_range() == def_range { + // Found the definition name - now find its value + return trace_bind_value(&node, document, ctx); + } + } + None +} + +/// Given a definition name node, find and trace the bound value. 
+fn trace_bind_value( + name_node: &SyntaxNode, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + // Walk up to find the Bind node + let bind_node = name_node.ancestors().find(|n| { + matches!( + n.kind(), + SyntaxKind::BIND_DESTRUCT | SyntaxKind::BIND_FUNCTION + ) + })?; + + // Look for BindDestruct with a value + if let Some(bind_destruct) = BindDestruct::cast(bind_node) { + let value_expr = bind_destruct.value()?; + return trace_expr_inner(&value_expr, document, ctx); + } + + // BindFunction doesn't have a traceable value in the same way + None +} + +/// Try to find a field in an object at the given range. +fn trace_field_in_object( + obj_range: TextRange, + field_name: &str, + root: &SyntaxNode, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + // Find the node at the object range + let node = root.descendants().find(|n| n.text_range() == obj_range)?; + + // Try to get object body from Expr wrapper + if let Some(obj_body) = Expr::cast(node.clone()) + .and_then(|e| e.expr_base()) + .and_then(|base| match base { + ExprBase::ExprObject(obj) => obj.obj_body(), + _ => None, + }) { + return find_field_in_object_body(&obj_body, field_name, document, ctx); + } + + // Try direct ExprObject + let obj = jrsonnet_rowan_parser::nodes::ExprObject::cast(node)?; + find_field_in_object_body(&obj.obj_body()?, field_name, document, ctx) +} + +/// Find a field value in an object body. +fn find_field_in_object_body( + body: &ObjBody, + field_name: &str, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + match body { + ObjBody::ObjBodyMemberList(members) => { + find_field_in_member_list(members, field_name, document, ctx) + } + ObjBody::ObjBodyComp(_) => { + // Object comprehensions are too dynamic to trace + None + } + } +} + +/// Find a field in an object member list. 
+fn find_field_in_member_list( + members: &ObjBodyMemberList, + field_name: &str, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + for member in members.members() { + match &member { + Member::MemberFieldNormal(field) => { + let name = extract_field_key_name(&field.field_name()?)?; + if name != field_name { + continue; + } + + let value = field.expr()?; + return trace_expr_inner(&value, document, ctx); + } + Member::MemberFieldMethod(method) => { + let name = extract_field_key_name(&method.field_name()?)?; + if name != field_name { + continue; + } + + let method_range = method.syntax().text_range(); + let body = method.expr()?; + return trace_expr_inner(&body, document, ctx).or(Some(ConstEvalResult::Local { + range: method_range, + })); + } + Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => {} + } + } + None +} + +/// Extract the key name from a `FieldName` node. +fn extract_field_key_name(field_name: &FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + // Try identifier first + if let Some(name) = fixed.id().and_then(|id| id.ident_lit()) { + return Some(name.text().to_string()); + } + // Try string literal (text token) + let text = fixed.text()?; + Some(strip_quotes(text.text())) + } + FieldName::FieldNameDynamic(dynamic) => { + // Dynamic field name like [expr] - try to extract if it's a string literal + extract_string_literal(&dynamic.expr()?) + } + } +} + +/// Extract a field name from a Name node. +fn extract_field_name(name: &Name) -> Option { + let ident = name.ident_lit()?; + Some(ident.text().to_string()) +} + +/// Extract a string literal value from an expression. +fn extract_string_literal(expr: &Expr) -> Option { + let ExprBase::ExprString(str_expr) = expr.expr_base()? else { + return None; + }; + let text = str_expr.syntax().text().to_string(); + Some(strip_quotes(&text)) +} + +/// Strip quotes from a string literal. 
+fn strip_quotes(s: &str) -> String { + s.trim_start_matches('@') + .trim_start_matches('"') + .trim_start_matches('\'') + .trim_end_matches('"') + .trim_end_matches('\'') + .to_string() +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{token_at_offset, ByteOffset, DocVersion}; + + use super::*; + + fn make_doc(code: &str) -> Document { + Document::new(code.to_string(), DocVersion::new(1)) + } + + fn find_expr_at(doc: &Document, offset: u32) -> Option { + let ast = doc.ast(); + let token = token_at_offset(ast.syntax(), ByteOffset::new(offset))?; + // Find the outermost Expr containing this token (last match in ancestors) + token.parent_ancestors().filter_map(Expr::cast).last() + } + + #[test] + fn test_trace_direct_import() { + let code = r#"import "lib.libsonnet""#; + let doc = make_doc(code); + let expr = find_expr_at(&doc, 0).unwrap(); + + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec![], + }) + ); + } + + #[test] + fn test_trace_var_to_import() { + let code = r#"local x = import "lib.libsonnet"; x"#; + let doc = make_doc(code); + + // Find the 'x' at the end + let ast = doc.ast(); + let token = token_at_offset(ast.syntax(), ByteOffset::new(34)) + .expect("should find token at offset 34"); + assert_eq!(token.text(), "x"); + + let result = trace_ident(&token, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec![], + }) + ); + } + + #[test] + fn test_trace_chained_vars() { + let code = r#"local x = import "lib.libsonnet"; local y = x; y"#; + let doc = make_doc(code); + + // Find the 'y' at the end + let ast = doc.ast(); + let token = token_at_offset(ast.syntax(), ByteOffset::new(47)) + .expect("should find token at offset 47"); + assert_eq!(token.text(), "y"); + + let result = trace_ident(&token, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: 
"lib.libsonnet".to_string(), + fields: vec![], + }) + ); + } + + #[test] + fn test_trace_std() { + let code = r"std"; + let doc = make_doc(code); + + let ast = doc.ast(); + let token = token_at_offset(ast.syntax(), ByteOffset::new(0)); + let token = token.unwrap(); + + let result = trace_ident(&token, &doc); + assert_eq!(result, Some(ConstEvalResult::Std { field: None })); + } + + #[test] + fn test_trace_std_field() { + let code = r"std.length"; + let doc = make_doc(code); + + // The entire expression should trace to Std with field + let expr = find_expr_at(&doc, 0).unwrap(); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Std { + field: Some("length".to_string()) + }) + ); + } + + #[test] + fn test_trace_import_field() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; + let doc = make_doc(code); + + // Find 'lib.foo' expression - position at 'foo' + let expr = find_expr_at(&doc, 40).unwrap(); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec!["foo".to_string()], + }) + ); + } + + #[test] + fn test_trace_nested_import_fields() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo.bar"#; + let doc = make_doc(code); + + // Find 'lib.foo.bar' expression + let expr = find_expr_at(&doc, 44).unwrap(); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec!["foo".to_string(), "bar".to_string()], + }) + ); + } + + #[test] + fn test_trace_through_chained_var_with_field() { + let code = r#"local x = import "lib.libsonnet"; local y = x; y.foo"#; + let doc = make_doc(code); + + // Find 'y.foo' expression + let expr = find_expr_at(&doc, 51).unwrap(); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec!["foo".to_string()], + }) + ); + 
} + + #[test] + fn test_trace_method_field_body() { + let code = r#"local obj = { foo(x): import "lib.libsonnet" }; obj.foo"#; + let doc = make_doc(code); + + let expr = find_expr_at(&doc, 51).expect("should find `obj.foo` expression"); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec![], + }) + ); + } + + #[test] + fn test_trace_cycle_detection() { + // This would cause infinite recursion without cycle detection + // local x = x; x + // ^6 ^10 ^13 + let code = r"local x = x; x"; + let doc = make_doc(code); + + let ast = doc.ast(); + let token = token_at_offset(ast.syntax(), ByteOffset::new(13)); + let token = token.unwrap(); + + // Should not panic or hang - returns None because cycle detection + // kicks in when we try to trace the value `x` which refers to itself + let result = trace_ident(&token, &doc); + // Cycle detection returns None when we can't make progress + assert_eq!(result, None); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/env.rs b/crates/jrsonnet-lsp-inference/src/env.rs new file mode 100644 index 00000000..b4c44b5b --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/env.rs @@ -0,0 +1,344 @@ +//! Type environment for tracking variable types during inference. + +use std::sync::Arc; + +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, MutStore, Ty}; +use rowan::TextRange; +use rustc_hash::{FxHashMap, FxHashSet}; + +/// Maximum depth for function body inference to prevent infinite recursion. +pub const MAX_FUNCTION_INFERENCE_DEPTH: usize = 5; + +/// Trait for resolving import types during analysis. +/// +/// Implementations can look up cached types for imported files, +/// enabling cross-file type sharing. +pub trait ImportResolver: std::fmt::Debug { + /// Resolve the type for an import path. + /// + /// Returns `Some(ty)` if the imported file's type is known, + /// `None` otherwise (falls back to `Ty::ANY`). 
+ fn resolve_import(&self, import_path: &str) -> Option; +} + +/// Context for `self` and `super` within object literals (using interned types). +#[derive(Debug, Clone)] +pub struct ObjectContextInterned { + /// The type of the current object (for `self` and `$` references). + pub self_type: Ty, + /// The type of the object being extended (for `super` references). + pub super_type: Option, +} + +/// Type environment for tracking variable types during inference. +/// +/// Uses interned `Ty` references for memory efficiency and O(1) equality. +/// The `MutStore` combines a global store (shared across files) with a local +/// store (per-file types created during analysis). +#[derive(Debug)] +pub struct TypeEnv { + /// Type store for interning types - combines global and local. + store: MutStore, + /// Stack of scopes, each mapping variable names to interned types. + scopes: Vec>, + /// Current depth of function body inference (to prevent infinite recursion). + function_depth: usize, + /// Constraints observed on parameters (parameter name -> observed type constraints). + /// Only populated when `in_constraint_mode` is true. + constraints: FxHashMap>, + /// Whether we're currently tracking parameter constraints. + in_constraint_mode: bool, + /// Set of parameter names we're tracking constraints for. + tracked_params: FxHashSet, + /// Stack of object contexts for `self` and `super` references. + /// The innermost object is at the end. + object_context: Vec, + /// Cache of inferred function types by their text range. + /// This prevents redundant inference and helps with recursive functions. + function_cache: FxHashMap, + /// Set of function ranges currently being inferred (to detect cycles). + functions_in_progress: FxHashSet, + /// Optional import resolver for cross-file type sharing. + import_resolver: Option>, +} + +impl TypeEnv { + /// Create a new type environment with a global store. 
+ pub fn new(global: Arc) -> Self { + Self { + store: MutStore::new(global), + scopes: vec![FxHashMap::default()], + function_depth: 0, + constraints: FxHashMap::default(), + in_constraint_mode: false, + tracked_params: FxHashSet::default(), + object_context: Vec::new(), + function_cache: FxHashMap::default(), + functions_in_progress: FxHashSet::default(), + import_resolver: None, + } + } + + /// Create a new type environment with a global store and import resolver. + pub fn with_import_resolver( + global: Arc, + import_resolver: Arc, + ) -> Self { + Self { + store: MutStore::new(global), + scopes: vec![FxHashMap::default()], + function_depth: 0, + constraints: FxHashMap::default(), + in_constraint_mode: false, + tracked_params: FxHashSet::default(), + object_context: Vec::new(), + function_cache: FxHashMap::default(), + functions_in_progress: FxHashSet::default(), + import_resolver: Some(import_resolver), + } + } + + /// Create a new type environment with a default global store. + /// + /// This is primarily for tests and backwards compatibility. + #[must_use] + pub fn new_default() -> Self { + Self::new(Arc::new(GlobalTyStore::new())) + } + + /// Resolve an import path to a type using the import resolver. + /// + /// Returns `Some(ty)` if the resolver is set and the import is cached, + /// `None` otherwise. + #[must_use] + pub fn resolve_import(&self, import_path: &str) -> Option { + self.import_resolver.as_ref()?.resolve_import(import_path) + } + + /// Get an immutable reference to the type store. + #[must_use] + pub fn store(&self) -> &MutStore { + &self.store + } + + /// Get a mutable reference to the type store. + pub fn store_mut(&mut self) -> &mut MutStore { + &mut self.store + } + + /// Consume and return the `MutStore`. + /// + /// This is used when transferring the store after analysis. + #[must_use] + pub fn into_store(self) -> MutStore { + self.store + } + + /// Check if we can infer function bodies (not too deep). 
+ #[must_use] + pub fn can_infer_function_body(&self) -> bool { + self.function_depth < MAX_FUNCTION_INFERENCE_DEPTH + } + + /// Enter a function body for inference. + pub fn enter_function(&mut self) { + self.function_depth += 1; + } + + /// Exit a function body after inference. + pub fn exit_function(&mut self) { + self.function_depth -= 1; + } + + /// Check if a function type has been cached for the given range. + #[must_use] + pub fn get_cached_function(&self, range: TextRange) -> Option { + self.function_cache.get(&range).copied() + } + + /// Cache a function type for the given range. + pub fn cache_function(&mut self, range: TextRange, ty: Ty) { + self.function_cache.insert(range, ty); + } + + /// Check if a function is currently being inferred (cycle detection). + #[must_use] + pub fn is_function_in_progress(&self, range: TextRange) -> bool { + self.functions_in_progress.contains(&range) + } + + /// Mark a function as being inferred. + pub fn start_function_inference(&mut self, range: TextRange) { + self.functions_in_progress.insert(range); + } + + /// Mark a function as finished being inferred. + pub fn finish_function_inference(&mut self, range: TextRange) { + self.functions_in_progress.remove(&range); + } + + /// Start tracking parameter constraints for the given parameter names. + pub fn start_constraint_tracking(&mut self, param_names: &[String]) { + self.in_constraint_mode = true; + self.constraints.clear(); + self.tracked_params.clear(); + for name in param_names { + self.tracked_params.insert(name.clone()); + } + } + + /// Stop tracking parameter constraints and return the collected constraints. + pub fn stop_constraint_tracking_ty(&mut self) -> FxHashMap> { + self.in_constraint_mode = false; + self.tracked_params.clear(); + std::mem::take(&mut self.constraints) + } + + /// Add a constraint for a tracked parameter. 
+ pub fn add_constraint_ty(&mut self, var_name: &str, constraint: Ty) { + if self.in_constraint_mode && self.tracked_params.contains(var_name) { + self.constraints + .entry(var_name.to_string()) + .or_default() + .push(constraint); + } + } + + /// Check if a variable is being tracked for constraints. + #[must_use] + pub fn is_tracked_param(&self, var_name: &str) -> bool { + self.in_constraint_mode && self.tracked_params.contains(var_name) + } + + /// Push a new scope. + pub fn push_scope(&mut self) { + self.scopes.push(FxHashMap::default()); + } + + /// Pop the current scope. + pub fn pop_scope(&mut self) { + if self.scopes.len() > 1 { + self.scopes.pop(); + } + } + + /// Define a variable in the current scope. + pub fn define_ty(&mut self, name: String, ty: Ty) { + if let Some(scope) = self.scopes.last_mut() { + scope.insert(name, ty); + } + } + + /// Look up a variable in all scopes (innermost first). + #[must_use] + pub fn lookup(&self, name: &str) -> Option { + for scope in self.scopes.iter().rev() { + if let Some(&ty) = scope.get(name) { + return Some(ty); + } + } + None + } + + /// Narrow a variable's type by intersecting with a new type. + /// + /// This is used for flow typing - when we know additional constraints + /// about a variable's type from a type guard. + pub fn narrow(&mut self, name: &str, narrowed_type: Ty) { + // Find the current type + let current_ty = self.lookup(name).unwrap_or(Ty::ANY); + // Use MutStore::narrow() directly + let new_ty = self.store.narrow(current_ty, narrowed_type); + // Define in current scope (shadows outer definition) + self.define_ty(name.to_string(), new_ty); + } + + /// Widen a variable's type by subtracting a type. + /// + /// This is used for flow typing in else branches - when we know + /// a type guard returned false. 
+ pub fn widen(&mut self, name: &str, remove_type: Ty) { + // Find the current type + let current_ty = self.lookup(name).unwrap_or(Ty::ANY); + // Use MutStore::widen() directly + let new_ty = self.store.widen(current_ty, remove_type); + // Define in current scope (shadows outer definition) + self.define_ty(name.to_string(), new_ty); + } + + /// Enter an object literal context, making `self` available with the given type. + /// + /// Call this when starting to infer field types within an object literal. + /// The `super_type` should be provided when this is an object extension. + pub fn push_object_context_ty(&mut self, self_type: Ty, super_type: Option) { + self.object_context.push(ObjectContextInterned { + self_type, + super_type, + }); + } + + /// Exit the current object literal context. + pub fn pop_object_context(&mut self) { + self.object_context.pop(); + } + + /// Get the interned `Ty` for `self` in the current context. + #[must_use] + pub fn self_ty(&self) -> Option { + self.object_context.last().map(|ctx| ctx.self_type) + } + + /// Get the interned `Ty` for `super` in the current context. + #[must_use] + pub fn super_ty(&self) -> Option { + self.object_context.last().and_then(|ctx| ctx.super_type) + } + + /// Get the interned `Ty` for `$` (root object) in the current context. 
    #[must_use]
    pub fn root_ty(&self) -> Option {
        // The outermost object context is the document root (`$`).
        self.object_context.first().map(|ctx| ctx.self_type)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Shadowing across push/pop: the inner definition wins while the scope
    // is open, and the outer one is restored on pop.
    #[test]
    fn test_type_env_scoping() {
        let mut env = TypeEnv::new_default();

        env.define_ty("x".to_string(), Ty::NUMBER);
        assert_eq!(env.lookup("x"), Some(Ty::NUMBER));

        env.push_scope();
        env.define_ty("x".to_string(), Ty::STRING);
        assert_eq!(env.lookup("x"), Some(Ty::STRING));

        env.pop_scope();
        assert_eq!(env.lookup("x"), Some(Ty::NUMBER));
    }

    // Narrowing shadows in the inner scope only.
    #[test]
    fn test_env_narrow() {
        let mut env = TypeEnv::new_default();
        env.define_ty("x".to_string(), Ty::ANY);
        env.push_scope();
        env.narrow("x", Ty::NUMBER);
        assert_eq!(env.lookup("x"), Some(Ty::NUMBER));
        env.pop_scope();
        assert_eq!(env.lookup("x"), Some(Ty::ANY));
    }

    // Widening subtracts a variant from a union.
    #[test]
    fn test_env_widen() {
        let mut env = TypeEnv::new_default();
        let union = env.store_mut().union(vec![Ty::NUMBER, Ty::NULL]);
        env.define_ty("x".to_string(), union);
        env.push_scope();
        env.widen("x", Ty::NULL);
        assert_eq!(env.lookup("x"), Some(Ty::NUMBER));
        env.pop_scope();
    }
}
diff --git a/crates/jrsonnet-lsp-inference/src/expr/advanced.rs b/crates/jrsonnet-lsp-inference/src/expr/advanced.rs
new file mode 100644
index 00000000..7a81f488
--- /dev/null
+++ b/crates/jrsonnet-lsp-inference/src/expr/advanced.rs
@@ -0,0 +1,1581 @@
use jrsonnet_lsp_scope::expr_resolves_to_builtin_std;
use jrsonnet_lsp_types::{
    FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyData,
};
use jrsonnet_rowan_parser::{nodes::ExprBase, AstNode};
use rustc_hash::FxHashMap;

use super::{bind_destruct_with_type_ty, infer_expr_ty_impl, TypeRecorder};
use crate::{
    env::TypeEnv, flow, helpers::extract_params_with_default_types_ty,
    object::infer_object_type_with_super_ty,
};

// Infer the result type of a call expression; the body continues on the
// following lines. NOTE(review): the generic parameter list (presumably
// `<R: TypeRecorder>`) was lost in extraction — confirm.
pub(super) fn infer_call_expr_base_ty(
    call: &jrsonnet_rowan_parser::nodes::ExprCall,
    env: &mut TypeEnv,
    recorder: &mut R,
) -> Ty {
    let base_ty = call
        // NOTE(review): this range begins mid-function — the signature of
        // `infer_call_expr_base_ty` sits on the preceding seam line.
        .callee()
        .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder));
    // `never` propagates: a callee or argument that cannot evaluate poisons
    // the whole call.
    if base_ty == Ty::NEVER {
        return Ty::NEVER;
    }

    let mut arg_types = Vec::new();
    if let Some(args) = call.args_desc() {
        for arg in args.args() {
            if let Some(expr) = arg.expr() {
                let arg_ty = infer_expr_ty_impl(&expr, env, None, recorder);
                if arg_ty == Ty::NEVER {
                    return Ty::NEVER;
                }
                arg_types.push(arg_ty);
            }
        }
    }

    // `std.filter` is special-cased so predicate facts can narrow the element
    // type of the result.
    if let Some(filtered_ty) = infer_std_filter_result_ty(call, &arg_types, env) {
        return filtered_ty;
    }

    // Dispatch on the callee's declared return specification.
    let store = env.store_mut();
    if let TyData::Function(ref func_data) = store.get(base_ty) {
        let return_spec = func_data.return_spec.clone();
        return match return_spec {
            ReturnSpec::Fixed(ret_ty) => ret_ty,
            ReturnSpec::SameAsArg(idx) => arg_types.get(idx).copied().unwrap_or(Ty::ANY),
            ReturnSpec::NonNegative => store.bounded_number(NumBounds::non_negative()),
            ReturnSpec::ArrayOfArg(idx) => {
                let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
                store.array(arg_ty)
            }
            ReturnSpec::ArrayWithSameElements(idx) => {
                let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
                match store.get(arg_ty) {
                    TyData::Array { elem, .. } => store.array(elem),
                    TyData::Tuple { ref elems } => {
                        // Tuples decay to an array of the union of elements.
                        let elems_copy: Vec = elems.clone();
                        let elem_union = store.union(elems_copy);
                        store.array(elem_union)
                    }
                    _ => store.array(Ty::ANY),
                }
            }
            ReturnSpec::SetWithSameElements(idx) => {
                let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
                match store.get(arg_ty) {
                    TyData::Array { elem, .. } => store.array_set(elem),
                    TyData::Tuple { ref elems } => {
                        let elems_copy: Vec = elems.clone();
                        let elem_union = store.union(elems_copy);
                        store.array_set(elem_union)
                    }
                    _ => store.array_set(Ty::ANY),
                }
            }
            ReturnSpec::FuncReturnType(idx) => {
                // Result is the return type of the function passed at `idx`.
                let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
                match store.get(arg_ty) {
                    TyData::Function(ref func_data) => match &func_data.return_spec {
                        ReturnSpec::Fixed(ret) => *ret,
                        _ => Ty::ANY,
                    },
                    _ => Ty::ANY,
                }
            }
            ReturnSpec::ArrayOfFuncReturn(idx) => {
                let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
                let ret_ty = match store.get(arg_ty) {
                    TyData::Function(ref func_data) => match &func_data.return_spec {
                        ReturnSpec::Fixed(ret) => *ret,
                        _ => Ty::ANY,
                    },
                    _ => Ty::ANY,
                };
                store.array(ret_ty)
            }
            ReturnSpec::FlatMapResult(idx) => {
                // flatMap: the mapper must itself return an array; unwrap one
                // level of array from its fixed return type.
                let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
                match store.get(arg_ty) {
                    TyData::Function(ref func_data) => match &func_data.return_spec {
                        ReturnSpec::Fixed(ret) => match store.get(*ret) {
                            TyData::Array { elem, .. } => store.array(elem),
                            _ => store.array(Ty::ANY),
                        },
                        _ => store.array(Ty::ANY),
                    },
                    _ => store.array(Ty::ANY),
                }
            }
            ReturnSpec::ObjectValuesType(idx) => {
                // objectValues: array of the union of the object's field types.
                let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
                match store.get(arg_ty) {
                    TyData::Object(ref obj_data) => {
                        if obj_data.fields.is_empty() {
                            store.array(Ty::ANY)
                        } else {
                            let field_types: Vec =
                                obj_data.fields.iter().map(|(_, fd)| fd.ty).collect();
                            let elem_union = store.union(field_types);
                            store.array(elem_union)
                        }
                    }
                    _ => store.array(Ty::ANY),
                }
            }
        };
    }

    // Calling a non-function (or unknown) value: fall back to `any`.
    Ty::ANY
}

/// Special-case `std.filter(pred, arr)`: narrow the element type using facts
/// extracted from the predicate. Returns `None` when the call is not a
/// recognized `std.filter` invocation.
fn infer_std_filter_result_ty(
    call: &jrsonnet_rowan_parser::nodes::ExprCall,
    arg_types: &[Ty],
    env: &mut TypeEnv,
) -> Option {
    let callee = call.callee()?;
    let ExprBase::ExprField(field) = callee.expr_base()?
else { + return None; + }; + let base = field.base()?; + if !expr_resolves_to_builtin_std(&base) { + return None; + } + if field.field()?.ident_lit()?.text() != "filter" { + return None; + } + + let args_desc = call.args_desc()?; + let args: Vec<_> = args_desc.args().collect(); + let [pred_arg, _arr_arg] = args.as_slice() else { + return None; + }; + let pred_expr = pred_arg.expr()?; + let arr_ty = arg_types.get(1).copied().unwrap_or(Ty::ANY); + let elem_fact = flow::extract_array_predicate_fact(&pred_expr)?; + let narrowed = elem_fact.apply_to(arr_ty, env.store_mut()); + Some(collection_to_array_ty(narrowed, env.store_mut())) +} + +fn collection_to_array_ty(ty: Ty, store: &mut jrsonnet_lsp_types::MutStore) -> Ty { + match store.get(ty) { + TyData::Array { elem, .. } => store.array(elem), + TyData::Tuple { ref elems } => { + let elems_copy = elems.clone(); + let elem_union = store.union(elems_copy); + store.array(elem_union) + } + TyData::Union(types) => { + let mut elem_types = Vec::new(); + for variant in types { + match store.get(variant) { + TyData::Array { elem, .. } => elem_types.push(elem), + TyData::Tuple { ref elems } => { + let elems_copy = elems.clone(); + elem_types.push(store.union(elems_copy)); + } + _ => return store.array(Ty::ANY), + } + } + if elem_types.is_empty() { + store.array(Ty::ANY) + } else { + let elem_union = store.union(elem_types); + store.array(elem_union) + } + } + _ => store.array(Ty::ANY), + } +} + +pub(super) fn infer_array_expr_base_ty( + arr: &jrsonnet_rowan_parser::nodes::ExprArray, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { + let (expected_array_elem, expected_tuple_elems) = expected.map_or((None, None), |exp_ty| { + let store = env.store(); + match store.get(exp_ty) { + TyData::Array { elem, .. 
            // NOTE(review): this range begins mid-function — the head of
            // `infer_array_expr_base_ty` sits on the preceding line.
            } => (Some(elem), None),
            TyData::Tuple { elems } => (None, Some(elems)),
            _ => (None, None),
        }
    });

    // Each element is checked against the matching tuple slot (if any),
    // falling back to the expected array element type.
    let elem_types: Vec = arr
        .exprs()
        .enumerate()
        .map(|(i, e)| {
            let elem_expected = expected_tuple_elems
                .as_ref()
                .and_then(|elems| elems.get(i).copied())
                .or(expected_array_elem);
            infer_expr_ty_impl(&e, env, elem_expected, recorder)
        })
        .collect();

    if elem_types.is_empty() {
        // Empty literal: adopt the expected element type when one is known.
        if let Some(elem_ty) = expected_array_elem {
            return env.store_mut().array(elem_ty);
        }
        return env.store_mut().array(Ty::ANY);
    }

    // All elements fully known -> keep positional precision as a tuple;
    // otherwise collapse to an array of the union.
    let all_known = elem_types.iter().all(|ty| *ty != Ty::ANY);
    if all_known {
        return env.store_mut().tuple(elem_types);
    }

    let elem_ty = env.store_mut().union(elem_types);
    env.store_mut().array(elem_ty)
}

pub(super) fn infer_array_comp_expr_base_ty(
    comp: &jrsonnet_rowan_parser::nodes::ExprArrayComp,
    env: &mut TypeEnv,
    recorder: &mut R,
) -> Ty {
    // Comprehension bindings live in their own scope.
    env.push_scope();

    for comp_spec in comp.comp_specs() {
        match comp_spec {
            jrsonnet_rowan_parser::nodes::CompSpec::ForSpec(for_spec) => {
                let iter_ty = for_spec
                    .expr()
                    .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder));

                // Element type of the iterated collection.
                let elem_ty = {
                    let store = env.store_mut();
                    match store.get(iter_ty) {
                        TyData::Array { elem, .. } => elem,
                        TyData::Tuple { ref elems } => {
                            let elems_copy: Vec = elems.clone();
                            store.union(elems_copy)
                        }
                        TyData::Union(types) => {
                            let mut elem_types = Vec::new();
                            for variant in types {
                                match store.get(variant) {
                                    TyData::Array { elem, .. } => elem_types.push(elem),
                                    TyData::Tuple { ref elems } => {
                                        let elems_copy: Vec = elems.clone();
                                        elem_types.push(store.union(elems_copy));
                                    }
                                    _ => {
                                        // Non-array-like variant: give up on
                                        // element precision.
                                        elem_types.clear();
                                        break;
                                    }
                                }
                            }
                            if elem_types.is_empty() {
                                Ty::ANY
                            } else {
                                store.union(elem_types)
                            }
                        }
                        _ if iter_ty == Ty::NEVER => Ty::NEVER,
                        _ => Ty::ANY,
                    }
                };

                if let Some(destruct) = for_spec.bind() {
                    bind_destruct_with_type_ty(&destruct, elem_ty, env);
                }
            }
            jrsonnet_rowan_parser::nodes::CompSpec::IfSpec(if_spec) => {
                let Some(cond) = if_spec.expr() else {
                    continue;
                };
                let _ = infer_expr_ty_impl(&cond, env, None, recorder);
                // Filter conditions narrow the loop variables for the body.
                let facts = flow::extract_facts(&cond);
                for (var_name, fact) in facts.iter() {
                    let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY);
                    let narrowed_ty = fact.apply_to(current_ty, env.store_mut());
                    env.define_ty(var_name.clone(), narrowed_ty);
                }
            }
        }
    }

    let body_ty = comp
        .expr()
        .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder));

    env.pop_scope();
    env.store_mut().array(body_ty)
}

pub(super) fn infer_function_expr_base_ty(
    func: &jrsonnet_rowan_parser::nodes::ExprFunction,
    env: &mut TypeEnv,
    recorder: &mut R,
) -> Ty {
    // Functions are cached and cycle-guarded by their source range.
    let func_range = func.syntax().text_range();

    if let Some(cached) = env.get_cached_function(func_range) {
        return cached;
    }

    if env.is_function_in_progress(func_range) {
        // Recursive reference: break the cycle with an all-`any` signature.
        let params = func
            .params_desc()
            .map(|p| extract_params_with_default_types_ty(&p, env))
            .unwrap_or_default();
        let any_params: Vec<_> = params
            .into_iter()
            .map(|p| ParamInterned {
                name: p.name,
                ty: Ty::ANY,
                has_default: p.has_default,
            })
            .collect();
        let store = env.store_mut();
        return store.function(FunctionData {
            params: any_params,
            return_spec: ReturnSpec::Fixed(Ty::ANY),
            variadic: false,
        });
    }

    env.start_function_inference(func_range);

    let params = func.params_desc().map_or_else(Vec::new, |params_desc| {
        // NOTE(review): `&params_desc` reconstructed from mojibake in the
        // extracted source — confirm against the original.
        extract_params_with_default_types_ty(&params_desc, env)
    });

    let (return_ty, param_constraints) = if env.can_infer_function_body() {
        func.expr().map_or_else(
            || (Ty::ANY, FxHashMap::default()),
            |body| {
                // Bind parameters in a fresh scope and collect constraints
                // generated while inferring the body.
                env.push_scope();
                let param_names: Vec = params.iter().map(|p| p.name.clone()).collect();
                for param in &params {
                    env.define_ty(param.name.clone(), param.ty);
                }

                env.start_constraint_tracking(&param_names);

                env.enter_function();
                let body_ty = infer_expr_ty_impl(&body, env, None, recorder);
                env.exit_function();

                let constraints = env.stop_constraint_tracking_ty();

                env.pop_scope();
                (body_ty, constraints)
            },
        )
    } else {
        // Too deep: skip body inference entirely.
        (Ty::ANY, FxHashMap::default())
    };

    // Refine each parameter with the constraints observed in the body.
    let final_params: Vec = params
        .into_iter()
        .map(|p| {
            let mut param_ty = p.ty;
            if let Some(constraints) = param_constraints.get(&p.name) {
                for constraint_ty in constraints {
                    param_ty = env.store_mut().narrow(param_ty, *constraint_ty);
                }
            }
            ParamInterned {
                name: p.name,
                ty: param_ty,
                has_default: p.has_default,
            }
        })
        .collect();

    let result = env.store_mut().function(FunctionData {
        params: final_params,
        return_spec: ReturnSpec::Fixed(return_ty),
        variadic: false,
    });

    env.finish_function_inference(func_range);
    env.cache_function(func_range, result);

    result
}

pub(super) fn infer_obj_extend_expr_base_ty(
    extend: &jrsonnet_rowan_parser::nodes::ExprObjExtend,
    env: &mut TypeEnv,
    recorder: &mut R,
) -> Ty {
    let base_ty = if let Some(base_expr) = extend.lhs_work() {
        infer_expr_ty_impl(&base_expr, env, None, recorder)
    } else {
        env.store_mut().object_any()
    };

    if base_ty == Ty::NEVER {
        return Ty::NEVER;
    }

    // Only a literal object body on the right-hand side is analyzable.
    let extension_body = extend.rhs_work().and_then(|e| {
        if let Some(ExprBase::ExprObject(obj)) = e.expr_base() {
            obj.obj_body()
        } else {
            None
        }
    });

    let extension_ty = infer_object_type_with_super_ty(
        extension_body.as_ref(),
        env,
        Some(base_ty),
        &mut |expr, env| infer_expr_ty_impl(expr, env, None, recorder),
    );

    let (base_data, ext_data) = {
        let store = env.store();
        let base_data = if let TyData::Object(ref obj) = store.get(base_ty) {
            Some(obj.clone())
        } else {
            None
        };
        let ext_data = if let TyData::Object(ref obj) = store.get(extension_ty) {
            Some(obj.clone())
        } else {
            None
        };
        (base_data, ext_data)
    };

    // Merge when both sides are known objects; otherwise keep whichever side
    // is known, falling back to an opaque object.
    match (base_data, ext_data) {
        (Some(base_obj), Some(ext_obj)) => {
            let merged = ObjectData::merge(&base_obj, &ext_obj);
            env.store_mut().object(merged)
        }
        (None, Some(ext_obj)) => env.store_mut().object(ext_obj),
        (Some(base_obj), None) => env.store_mut().object(base_obj),
        (None, None) => env.store_mut().object_any(),
    }
}

#[cfg(test)]
mod tests {
    use std::collections::BTreeSet;

    use jrsonnet_lsp_document::{DocVersion, Document};
    use jrsonnet_lsp_types::{
        FieldDefInterned, FieldVis, FunctionData, MutStore, ObjectData, ParamInterned, ReturnSpec,
        TyData,
    };
    use rstest::rstest;

    use super::{super::*, *};

    /// Assert that an `ObjectData` has exactly the specified field names.
    fn assert_fields_ty(obj: &ObjectData, expected: &[&str]) {
        let actual: BTreeSet<_> = obj.fields.iter().map(|(name, _)| name.as_str()).collect();
        let expected: BTreeSet<_> = expected.iter().copied().collect();
        assert_eq!(actual, expected, "Field mismatch");
    }

    /// Get `TyData` from `TypeEnv` for a Ty.
    fn get_ty_data(env: &TypeEnv, ty: Ty) -> TyData {
        env.store().get(ty)
    }

    /// Try to extract tuple element types.
    fn try_tuple(env: &TypeEnv, ty: Ty) -> Option> {
        match env.store().get(ty) {
            TyData::Tuple { elems } => Some(elems),
            _ => None,
        }
    }

    /// Try to extract array element type.
    fn try_array(env: &TypeEnv, ty: Ty) -> Option {
        match env.store().get(ty) {
            TyData::Array { elem, .. } => Some(elem),
            _ => None,
        }
    }

    /// Try to extract array info (element type and `is_set` flag).
    fn try_array_with_set(env: &TypeEnv, ty: Ty) -> Option<(Ty, bool)> {
        match env.store().get(ty) {
            TyData::Array { elem, is_set } => Some((elem, is_set)),
            _ => None,
        }
    }

    /// Try to extract `ObjectData`.
    fn try_object(env: &TypeEnv, ty: Ty) -> Option {
        match env.store().get(ty) {
            TyData::Object(obj) => Some(obj),
            _ => None,
        }
    }

    /// Try to extract `FunctionData`.
    fn try_function(env: &TypeEnv, ty: Ty) -> Option {
        match env.store().get(ty) {
            TyData::Function(func) => Some(func),
            _ => None,
        }
    }

    /// Try to extract union variants.
    fn try_union(env: &TypeEnv, ty: Ty) -> Option> {
        match env.store().get(ty) {
            TyData::Union(variants) => Some(variants),
            _ => None,
        }
    }

    /// Get field definition from `ObjectData` by name.
    fn get_field_ty<'a>(obj: &'a ObjectData, name: &str) -> Option<&'a FieldDefInterned> {
        obj.fields
            .iter()
            .find(|(n, _)| n == name)
            .map(|(_, def)| def)
    }

    /// Helper to infer document type and return (Ty, `TypeEnv`) for testing.
    fn infer_doc(code: &str) -> (Ty, TypeEnv) {
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        infer_document_type_ty(&doc)
    }

    #[test]
    fn test_infer_error_is_never() {
        let (ty, _) = infer_doc(r#"error "fail""#);
        assert_eq!(ty, Ty::NEVER);
    }

    #[test]
    fn test_infer_number() {
        let (ty, _) = infer_doc("42");
        assert_eq!(ty, Ty::NUMBER);
    }

    #[test]
    fn test_infer_modulo_remainder() {
        let (ty, _) = infer_doc("5 % 2");
        assert_eq!(ty, Ty::NUMBER);
    }

    // `%` doubles as string formatting when the left operand is a string.
    #[test]
    fn test_infer_modulo_string_formatting() {
        let (ty, _) = infer_doc(r#""hello %s" % "world""#);
        assert_eq!(ty, Ty::STRING);
    }

    #[test]
    fn test_infer_string() {
        let (ty, _) = infer_doc(r#""hello""#);
        assert_eq!(ty, Ty::STRING);
    }

    #[test]
    fn test_infer_bool_true() {
        let (ty, _) = infer_doc("true");
        assert_eq!(ty, Ty::TRUE);
    }

    #[test]
    fn test_infer_bool_false() {
        let (ty, _) = infer_doc("false");
        assert_eq!(ty, Ty::FALSE);
    }

    #[test]
    fn test_infer_null() {
        let (ty, _) = infer_doc("null");
        assert_eq!(ty, Ty::NULL);
    }

    #[test]
    fn test_infer_array() {
        // Array literals with known element types become tuples
        let (ty, env) = infer_doc("[1, 2, 3]");
        let elems = try_tuple(&env, ty).expect("expected tuple");
        assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]);
    }

    #[test]
    fn test_infer_empty_array() {
        let (ty, env) = infer_doc("[]");
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::ANY);
    }

    #[test]
    fn test_infer_function() {
        // x + y where x and y are Any defaults to Number
        let (ty, env) = infer_doc("function(x, y) x + y");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![
                    ParamInterned {
                        name: "x".to_string(),
                        ty: Ty::ANY,
                        has_default: false
                    },
                    ParamInterned {
                        name: "y".to_string(),
                        ty: Ty::ANY,
                        has_default: false
                    },
                ],
                return_spec:
                ReturnSpec::Fixed(Ty::NUMBER),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_infer_function_return_object() {
        // Function that returns an object
        let (ty, env) = infer_doc("function(x) { a: x }");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func.params,
            vec![ParamInterned {
                name: "x".to_string(),
                ty: Ty::ANY,
                has_default: false
            },]
        );
        assert!(!func.variadic);
        // Return type should be inferred as Object
        let ReturnSpec::Fixed(ret_ty) = &func.return_spec else {
            unreachable!("Expected Fixed return type");
        };
        let obj = try_object(&env, *ret_ty).expect("expected object");
        assert_fields_ty(&obj, &["a"]);
    }

    #[test]
    fn test_infer_function_return_number() {
        // Function that returns a number
        let (ty, env) = infer_doc("function() 42");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![],
                return_spec: ReturnSpec::Fixed(Ty::NUMBER),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_infer_function_return_string() {
        // Function that returns a string
        let (ty, env) = infer_doc(r#"function(s) s + "!""#);
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func.params,
            vec![ParamInterned {
                name: "s".to_string(),
                ty: Ty::ANY,
                has_default: false
            },]
        );
        assert!(!func.variadic);
        // s + "!" where s is Any defaults to Number (not String, because Any + String is unhandled)
        assert_eq!(func.return_spec, ReturnSpec::Fixed(Ty::NUMBER));
    }

    // Default parameter values seed the parameter's inferred type.
    #[test]
    fn test_function_default_number() {
        let (ty, env) = infer_doc("function(x=1) x");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![ParamInterned {
                    name: "x".to_string(),
                    ty: Ty::NUMBER,
                    has_default: true
                }],
                return_spec: ReturnSpec::Fixed(Ty::NUMBER),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_function_default_string() {
        let (ty, env) = infer_doc(r#"function(x="hello") x"#);
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![ParamInterned {
                    name: "x".to_string(),
                    ty: Ty::STRING,
                    has_default: true
                }],
                return_spec: ReturnSpec::Fixed(Ty::STRING),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_function_default_bool() {
        let (ty, env) = infer_doc("function(x=true) x");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![ParamInterned {
                    name: "x".to_string(),
                    ty: Ty::TRUE,
                    has_default: true
                }],
                return_spec: ReturnSpec::Fixed(Ty::TRUE),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_function_default_null() {
        let (ty, env) = infer_doc("function(x=null) x");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![ParamInterned {
                    name: "x".to_string(),
                    ty: Ty::NULL,
                    has_default: true
                }],
                return_spec: ReturnSpec::Fixed(Ty::NULL),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_function_no_default() {
        let (ty, env) = infer_doc("function(x) x");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![ParamInterned {
                    name: "x".to_string(),
                    ty: Ty::ANY,
                    has_default: false
                }],
                return_spec: ReturnSpec::Fixed(Ty::ANY),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_function_default_expression() {
        let (ty, env) = infer_doc("function(x=1+2) x");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![ParamInterned {
                    name: "x".to_string(),
                    ty: Ty::NUMBER,
                    has_default: true
                }],
                return_spec: ReturnSpec::Fixed(Ty::NUMBER),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_function_multiple_params_mixed_defaults() {
        let (ty, env) = infer_doc(r#"function(a, b=1, c="hello") a + b"#);
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![
                    ParamInterned {
                        name: "a".to_string(),
                        ty: Ty::ANY,
                        has_default: false
                    },
                    ParamInterned {
                        name: "b".to_string(),
                        ty: Ty::NUMBER,
                        has_default: true
                    },
                    ParamInterned {
                        name: "c".to_string(),
                        ty: Ty::STRING,
                        has_default: true
                    },
                ],
                return_spec: ReturnSpec::Fixed(Ty::NUMBER),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_function_default_param_array() {
        let (ty, env) = infer_doc("function(x=[1,2,3]) x");
        let func = try_function(&env, ty).expect("expected function");
        // Default is a tuple [Number, Number, Number]
        let param_tuple = try_tuple(&env, func.params[0].ty).expect("expected tuple");
        assert_eq!(param_tuple, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]);
        // Return type is the same tuple
        let ReturnSpec::Fixed(ret) = func.return_spec.clone() else {
            unreachable!("Expected Fixed return spec");
        };
        let ret_tuple = try_tuple(&env, ret).expect("expected tuple");
        assert_eq!(ret_tuple, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]);
        // Full param structure
        assert_eq!(
            func.params,
            vec![ParamInterned {
                name: "x".to_string(),
                ty: func.params[0].ty,
                has_default: true,
            }]
        );
    }

    #[test]
    fn test_function_default_param_object() {
        let default_value = ['{', 'a', ':', '1', '}'].into_iter().collect::();
        let doc =
        format!("function(x={default_value}) x");
        let (ty, env) = infer_doc(&doc);
        let func = try_function(&env, ty).expect("expected function");
        // Default is an object with field 'a'
        let param_obj = try_object(&env, func.params[0].ty).expect("expected object");
        assert_fields_ty(&param_obj, &["a"]);
        // Full param structure
        assert_eq!(
            func.params,
            vec![ParamInterned {
                name: "x".to_string(),
                ty: func.params[0].ty,
                has_default: true,
            }]
        );
    }

    #[test]
    fn test_infer_conditional_union() {
        let (ty, env) = infer_doc("if true then 1 else \"x\"");
        // Should be a union of number and string
        let variants = try_union(&env, ty).expect("expected union");
        assert!(variants.contains(&Ty::NUMBER));
        assert!(variants.contains(&Ty::STRING));
    }

    #[test]
    fn test_infer_conditional_with_error() {
        // If condition is error, the whole expression is Never
        let (ty, _) = infer_doc(r#"if error "fail" then 1 else 2"#);
        assert_eq!(ty, Ty::NEVER);
    }

    // ReturnSpec-driven stdlib results.
    #[test]
    fn test_stdlib_map_infers_func_return() {
        let (ty, env) = infer_doc("std.map(function(x) x + 1, [1, 2, 3])");
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    #[test]
    fn test_stdlib_filter_preserves_array_type() {
        let (ty, env) = infer_doc("std.filter(function(x) x > 0, [1, 2, 3])");
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    #[test]
    fn test_stdlib_filter_with_std_predicate_narrows_elements() {
        let (ty, env) = infer_doc(r#"std.filter(std.isNumber, [1, "x", 2])"#);
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    #[test]
    fn test_stdlib_filter_map_uses_mapper_return_type() {
        let (ty, env) = infer_doc(
            r#"local inc(x) =
  assert std.isNumber(x);
  x + 1;
std.filterMap(std.isNumber, inc, [1, "x", 2])"#,
        );
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    #[test]
    fn test_stdlib_foldl_returns_accumulator_type() {
        let (ty, _) = infer_doc("std.foldl(function(acc, x) acc + x, [1, 2, 3], 0)");
        assert_eq!(ty, Ty::NUMBER);
    }

    #[test]
    fn test_stdlib_length_returns_non_negative() {
        let (ty, env) = infer_doc("std.length([1, 2, 3])");
        let data = get_ty_data(&env, ty);
        assert!(matches!(data, TyData::BoundedNumber(_)));
    }

    #[test]
    fn test_stdlib_abs_returns_non_negative() {
        let (ty, env) = infer_doc("std.abs(-5)");
        let data = get_ty_data(&env, ty);
        assert!(matches!(data, TyData::BoundedNumber(_)));
    }

    #[test]
    fn test_stdlib_count_returns_non_negative() {
        let (ty, env) = infer_doc("std.count([1, 2, 2, 3], 2)");
        let data = get_ty_data(&env, ty);
        assert!(matches!(data, TyData::BoundedNumber(_)));
    }

    #[test]
    fn test_stdlib_type_returns_string() {
        let (ty, _) = infer_doc("std.type(42)");
        assert_eq!(ty, Ty::STRING);
    }

    #[test]
    fn test_stdlib_object_values_infers_field_types() {
        let (ty, env) = infer_doc("std.objectValues({a: 1, b: 2})");
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    #[test]
    fn test_stdlib_object_values_mixed_types() {
        let (ty, env) = infer_doc(r#"std.objectValues({x: 1, y: "str"})"#);
        let elem = try_array(&env, ty).expect("expected array");
        let variants = try_union(&env, elem).expect("expected union");
        assert!(variants.contains(&Ty::NUMBER));
        assert!(variants.contains(&Ty::STRING));
    }

    #[test]
    fn test_stdlib_field_access_returns_function() {
        // Accessing std.map (without calling) should return a function type
        let (ty, env) = infer_doc("std.map");
        let _ = try_function(&env, ty).expect("expected function");
    }

    #[test]
    fn test_comprehension_variable_type() {
        // [x + 1 for x in [1, 2, 3]] - x should be inferred as Number
        // Result should be Array
        let (ty, env) = infer_doc("[x + 1 for x in [1, 2, 3]]");
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    #[test]
    fn test_comprehension_field_access() {
        // [x.name for x in arr] where arr is [{name: "a"}]
        // The loop variable x should have the element type from the array
        let (ty, env) =
            infer_doc(r#"local arr = [{name: "a"}, {name: "b"}]; [x.name for x in arr]"#);
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::STRING);
    }

    #[test]
    fn test_comprehension_with_any_iterator() {
        // When iterator type is unknown, loop var should be Any
        let (ty, env) = infer_doc("local getItems = function() []; [x for x in getItems()]");
        // Result should still be an array
        let _ = try_array(&env, ty).expect("expected array");
    }

    #[test]
    fn test_comprehension_if_filter_narrows_loop_var() {
        let (ty, env) = infer_doc("local xs = [1, null, 2]; [x for x in xs if x != null]");
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    #[test]
    fn test_comprehension_if_filter_with_all_map_predicate() {
        let (ty, env) = infer_doc(
            r"local xs = [1, null, 2];
assert std.all(std.map(function(x) x == null || std.isNumber(x), xs));
[x for x in xs if x != null]",
        );
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    // Parameter constraint tests

    #[test]
    fn test_parameter_constraint_from_field_access() {
        // function(x) x.name - x should be inferred as Object with field "name"
        let (ty, env) = infer_doc("function(x) x.name");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func.params.iter().map(|p| &p.name).collect::>(),
            vec!["x"]
        );
        let param_obj = try_object(&env, func.params[0].ty).expect("expected object");
        assert_fields_ty(&param_obj, &["name"]);
    }

    #[test]
    fn test_parameter_constraint_multiple_fields() {
        // function(x) x.a + x.b - x should be inferred as Object with fields "a" and "b"
        let (ty, env) = infer_doc("function(x) x.a + x.b");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func.params.iter().map(|p| &p.name).collect::>(),
            vec!["x"]
        );
        let param_obj = try_object(&env, func.params[0].ty).expect("expected object");
        assert_fields_ty(&param_obj, &["a", "b"]);
    }

    #[test]
    fn test_parameter_constraint_not_used() {
        // function(x) 42 - x is not used, should remain Any
        let (ty, env) = infer_doc("function(x) 42");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func,
            FunctionData {
                params: vec![ParamInterned {
                    name: "x".to_string(),
                    ty: Ty::ANY,
                    has_default: false
                }],
                return_spec: ReturnSpec::Fixed(Ty::NUMBER),
                variadic: false,
            }
        );
    }

    #[test]
    fn test_parameter_constraint_multiple_params() {
        // function(a, b) a.x + b.y - each param gets its own constraints
        let (ty, env) = infer_doc("function(a, b) a.x + b.y");
        let func = try_function(&env, ty).expect("expected function");
        assert_eq!(
            func.params.iter().map(|p| &p.name).collect::>(),
            vec!["a", "b"]
        );

        // First param 'a' should have only field 'x'
        let obj_a = try_object(&env, func.params[0].ty).expect("expected object");
        assert_fields_ty(&obj_a, &["x"]);

        // Second param 'b' should have only field 'y'
        let obj_b = try_object(&env, func.params[1].ty).expect("expected object");
        assert_fields_ty(&obj_b, &["y"]);
    }

    /// Helper to infer an expression with an expected type (Ty-native)
    /// The `expected_builder` is called with the env's store to create the expected type.
    fn infer_with_expected_fn(code: &str, expected_builder: F) -> (Ty, TypeEnv)
    where
        F: FnOnce(&mut MutStore) -> Ty,
    {
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let mut env = TypeEnv::new_default();
        let expected = expected_builder(env.store_mut());
        let ty = doc.ast().expr().map_or(Ty::ANY, |expr| {
            infer_expr_ty_with_expected(&expr, &mut env, Some(expected))
        });
        (ty, env)
    }

    #[test]
    fn test_bidirectional_empty_array_with_expected() {
        // [] with expected Array should get Number element type
        let (ty, env) = infer_with_expected_fn("[]", |store| store.array(Ty::NUMBER));

        // Should be Array, not Array
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::NUMBER);
    }

    #[test]
    fn test_bidirectional_without_expected() {
        // [] without expected type should be Array
        let (ty, env) = infer_doc("[]");
        let elem = try_array(&env, ty).expect("expected array");
        assert_eq!(elem, Ty::ANY);
    }

    #[test]
    fn test_bidirectional_if_then_else_propagates() {
        // if cond then [] else [] with expected Array
        let (ty, env) =
            infer_with_expected_fn("if true then [] else []", |store| store.array(Ty::STRING));

        // Both branches should get Array
        // Result should be Array (union of both String arrays = String array)
        let data = get_ty_data(&env, ty);
        match data {
            TyData::Array { elem, ..
} => assert_eq!(elem, Ty::STRING), + TyData::Union(variants) => { + for v in variants { + let elem = try_array(&env, v).expect("expected array"); + assert_eq!(elem, Ty::STRING); + } + } + _ => unreachable!("Expected Array or Union, got {:?}", data), + } + } + + #[test] + fn test_bidirectional_parened_propagates() { + // (expr) should propagate expected type through + let (ty, env) = infer_with_expected_fn("([])", |store| store.array(Ty::BOOL)); + + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::BOOL); + } + + #[test] + fn test_bidirectional_array_elements() { + // [a, b] where we have no info on a, b but expect Array + // This doesn't magically make unknown expressions typed, but sets the context + // Even without expected type, [1, 2, 3] should be Tuple([Number, Number, Number]) + let (ty, env) = infer_doc("[1, 2, 3]"); + + let elems = try_tuple(&env, ty).expect("expected tuple"); + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + } + + #[test] + fn test_bidirectional_expected_tuple() { + // With expected Tuple type, elements get positional expected types + let (ty, env) = infer_with_expected_fn("[[], []]", |store| { + let arr_num = store.array(Ty::NUMBER); + let arr_str = store.array(Ty::STRING); + store.tuple(vec![arr_num, arr_str]) + }); + + let elems = try_tuple(&env, ty).expect("expected tuple"); + // Extract inner types of each array element and compare full structure + let inner_types: Vec = elems + .iter() + .map(|&e| try_array(&env, e).expect("expected array")) + .collect(); + assert_eq!(inner_types, vec![Ty::NUMBER, Ty::STRING]); + } + + #[rstest] + #[case::is_number( + r#"local x = if true then 1 else "str"; assert std.isNumber(x); x"#, + Ty::NUMBER + )] + #[case::is_string( + r#"local s = if true then "hello" else 42; assert std.isString(s); s"#, + Ty::STRING + )] + #[case::is_boolean( + // With literal boolean inference, `if true then false else 0` evaluates to `false` + // which has type Ty::FALSE (a 
subtype of BOOL) + r"local b = if true then false else 0; assert std.isBoolean(b); b", + Ty::FALSE + )] + fn test_assert_narrows_primitive_type(#[case] code: &str, #[case] expected: Ty) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + assert_eq!(infer_expr_ty(&expr, &mut env), expected); + } + + #[test] + fn test_assert_narrows_to_array() { + let code = r"local arr = if true then [1, 2] else {}; assert std.isArray(arr); arr"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + // After assert std.isArray(arr), arr should be narrowed to the Tuple from the then-branch + let elems = try_tuple(&env, ty).expect("expected tuple"); + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER]); + } + + #[test] + fn test_assert_narrows_to_object() { + let code = r"local obj = if true then { a: 1 } else [1, 2]; assert std.isObject(obj); obj"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + // After assert std.isObject(obj), obj should be narrowed to the Object from the then-branch + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["a"]); + } + + #[test] + fn test_assert_narrows_null_check() { + let code = r"local x = if true then { a: 1 } else null; assert x != null; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + // After assert x != null, x should be narrowed to the Object from the then-branch + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["a"]); + } + + #[test] + fn 
test_multiple_assert_statements() { + let code = r#" + local x = if true then 1 else "str"; + local y = if true then [1] else {}; + assert std.isNumber(x); + assert std.isArray(y); + [x, y] + "#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let elems = try_tuple(&env, ty).expect("expected tuple"); + // First element is narrowed to Number, second to Tuple([Number]) + // Verify complete structure: exactly [Number, Tuple([Number])] + assert!( + matches!(elems.as_slice(), [first, second] if *first == Ty::NUMBER && try_tuple(&env, *second).expect("expected tuple") == vec![Ty::NUMBER]), + "Expected [Number, Tuple([Number])], got {elems:?}" + ); + } + + #[test] + fn test_assert_object_has() { + let code = r#"local obj = {}; assert std.objectHas(obj, "key"); obj"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + assert_eq!(ty, Ty::NEVER); + } + + #[test] + fn test_assert_object_has_narrows_unknown_object() { + let code = r#"local obj = std.parseJson("{}"); assert std.objectHas(obj, "key"); obj"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["key"]); + } + + #[test] + fn test_object_string_index_then_field_access_preserves_type() { + let (ty, _) = infer_doc(r#"local hm = { foo: { a: true } }; hm["foo"].a"#); + assert_eq!(ty, Ty::TRUE); + } + + #[test] + fn test_union_object_field_access_preserves_union_members() { + let (ty, env) = infer_doc( + r#" +local f(x, y, z) = + assert std.isBoolean(x) && std.isBoolean(y) && std.isBoolean(z); + local obj = { + a: + if x then 
+ { b: if y then 1 else false } + else + { b: if z then true else "hi" }, + }; + obj.a.b; +f(true, false, true) +"#, + ); + + let TyData::Union(variants) = get_ty_data(&env, ty) else { + panic!("expected union type, got {:?}", get_ty_data(&env, ty)); + }; + assert!(variants.contains(&Ty::NUMBER)); + assert!(variants.contains(&Ty::STRING)); + assert!( + variants.contains(&Ty::BOOL) + || variants.contains(&Ty::TRUE) + || variants.contains(&Ty::FALSE) + ); + } + + #[rstest] + #[case::normal_visibility("{ a: 1 }", "a", FieldVis::Normal)] + #[case::hidden_visibility("{ a:: 1 }", "a", FieldVis::Hidden)] + #[case::force_visible("{ a::: 1 }", "a", FieldVis::ForceVisible)] + fn test_field_visibility_tracking( + #[case] code: &str, + #[case] field_name: &str, + #[case] expected_visibility: FieldVis, + ) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, field_name).expect("Field should exist"); + assert_eq!(field_def.visibility, expected_visibility); + } + + #[test] + fn test_object_with_mixed_visibility() { + let code = r"{ visible: 1, hidden:: 2, force::: 3 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let obj = try_object(&env, ty).expect("expected object"); + + assert_eq!( + get_field_ty(&obj, "visible").unwrap().visibility, + FieldVis::Normal + ); + assert_eq!( + get_field_ty(&obj, "hidden").unwrap().visibility, + FieldVis::Hidden + ); + assert_eq!( + get_field_ty(&obj, "force").unwrap().visibility, + FieldVis::ForceVisible + ); + } + + #[test] + fn test_field_is_visible() { + let code = r"{ visible: 1, hidden:: 2 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut 
env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let obj = try_object(&env, ty).expect("expected object"); + + assert!(matches!( + get_field_ty(&obj, "visible").unwrap().visibility, + FieldVis::Normal | FieldVis::ForceVisible + )); + assert!(matches!( + get_field_ty(&obj, "hidden").unwrap().visibility, + FieldVis::Hidden + )); + } + + #[test] + fn test_self_reference_simple() { + // self.a should be typed as Number inside the object + let (ty, env) = infer_doc(r"{ a: 1, b: self.a }"); + let obj = try_object(&env, ty).expect("expected object"); + // Field 'b' should have type Any (because self.a is inferred in first pass as Any) + // This is the limitation of the two-pass approach + assert_fields_ty(&obj, &["a", "b"]); + } + + #[test] + fn test_self_knows_fields_exist() { + // self should know which fields exist + let (ty, env) = infer_doc(r"{ a: 1, b: 2, c: self }"); + let obj = try_object(&env, ty).expect("expected object"); + // Field 'c' should be typed as Object with fields a, b, c + let c_field = get_field_ty(&obj, "c").expect("should have c field"); + let c_obj = try_object(&env, c_field.ty).expect("expected object"); + assert_fields_ty(&c_obj, &["a", "b", "c"]); + } + + #[test] + fn test_self_outside_object_is_open() { + // Outside an object, self should return open object + // This is a somewhat artificial test since 'self' outside an object is a runtime error + // We just test that the inference doesn't panic + let (ty, env) = infer_doc("self"); + let obj = try_object(&env, ty).expect("expected object"); + // Should be open (has_unknown = true) + assert!(obj.has_unknown, "self outside object should be open object"); + } + + #[test] + fn test_nested_objects_self() { + // In nested objects, self should refer to innermost object + let (ty, env) = infer_doc(r"{ outer: 1, nested: { inner: 2, self_ref: self } }"); + let obj = try_object(&env, ty).expect("expected object"); + + // Get nested object 
+ let nested_field = get_field_ty(&obj, "nested").expect("should have nested field"); + let nested_obj = try_object(&env, nested_field.ty).expect("expected object"); + + // self_ref should refer to the nested object, not outer + let self_ref_field = + get_field_ty(&nested_obj, "self_ref").expect("should have self_ref field"); + let self_ref_obj = try_object(&env, self_ref_field.ty).expect("expected object"); + + // self_ref should have 'inner' and 'self_ref', but NOT 'outer' + assert_fields_ty(&self_ref_obj, &["inner", "self_ref"]); + } + + #[test] + fn test_object_extension_super() { + // In an object extension, super should refer to the base object + let code = r" + local base = { a: 1, b: 2 }; + base { c: super.a } + "; + let (ty, env) = infer_doc(code); + let obj = try_object(&env, ty).expect("expected object"); + + // Result should have fields a, b, c + assert_fields_ty(&obj, &["a", "b", "c"]); + } + + #[test] + fn test_dollar_root_reference() { + // $ should refer to the outermost object + let (ty, env) = infer_doc(r"{ outer: 1, nested: { inner: 2, root_ref: $ } }"); + let obj = try_object(&env, ty).expect("expected object"); + + // Get nested object + let nested_field = get_field_ty(&obj, "nested").expect("should have nested field"); + let nested_obj = try_object(&env, nested_field.ty).expect("expected object"); + + // root_ref should refer to the outer object + let root_ref_field = + get_field_ty(&nested_obj, "root_ref").expect("should have root_ref field"); + let root_ref_obj = try_object(&env, root_ref_field.ty).expect("expected object"); + + // root_ref (=$) should have 'outer' and 'nested' + assert_fields_ty(&root_ref_obj, &["outer", "nested"]); + } + + #[test] + fn test_object_extension_result_type() { + // Object extension should merge types properly + let code = r#" + local base = { a: 1 }; + base { b: "hello" } + "#; + let (ty, env) = infer_doc(code); + let obj = try_object(&env, ty).expect("expected object"); + + // Result should have both a 
(Number) and b (String) + assert_fields_ty(&obj, &["a", "b"]); + assert_eq!(get_field_ty(&obj, "a").unwrap().ty, Ty::NUMBER); + assert_eq!(get_field_ty(&obj, "b").unwrap().ty, Ty::STRING); + } + + #[test] + fn test_recursive_function_memoization() { + // Recursive function - should not cause infinite loop + let code = r" + local factorial = function(n) + if n <= 1 then 1 + else n * factorial(n - 1); + factorial + "; + let (ty, env) = infer_doc(code); + + // Should be a function type (not infinite loop) + let _func = try_function(&env, ty).expect("expected function"); + } + + #[test] + fn test_mutually_recursive_functions() { + // Mutually recursive functions + let code = r" + local isEven = function(n) + if n == 0 then true + else isOdd(n - 1); + local isOdd = function(n) + if n == 0 then false + else isEven(n - 1); + { isEven: isEven, isOdd: isOdd } + "; + let (ty, env) = infer_doc(code); + + // Should infer without infinite loop + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["isEven", "isOdd"]); + } + + #[test] + fn test_function_memoization_reuse() { + // Same function referenced multiple times - should be memoized + let code = r" + local f = function(x) x + 1; + { a: f, b: f, c: f } + "; + let (ty, env) = infer_doc(code); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["a", "b", "c"]); + + // All three fields should have the same function type + let a_ty = get_field_ty(&obj, "a").unwrap().ty; + let b_ty = get_field_ty(&obj, "b").unwrap().ty; + let c_ty = get_field_ty(&obj, "c").unwrap().ty; + let _func = try_function(&env, a_ty).expect("expected function"); + // With type interning, same type = same Ty + assert_eq!(a_ty, b_ty); + assert_eq!(b_ty, c_ty); + } + + #[test] + fn test_deeply_nested_functions() { + // Deeply nested function calls - tests depth limit with memoization + let code = r" + local f1 = function(x) x; + local f2 = function(x) f1(x); + local f3 = function(x) f2(x); + 
local f4 = function(x) f3(x); + local f5 = function(x) f4(x); + local f6 = function(x) f5(x); + f6 + "; + let (ty, env) = infer_doc(code); + + // Should be a function (memoization helps with deep nesting) + let _func = try_function(&env, ty).expect("expected function"); + } + + #[rstest] + #[case::std_set("std.set([1, 2, 3])", true, Ty::NUMBER)] + #[case::std_uniq(r#"std.uniq(["a", "b", "c"])"#, true, Ty::STRING)] + #[case::std_set_union("std.setUnion([1, 2], [3, 4])", true, Ty::NUMBER)] + #[case::std_set_inter("std.setInter([1, 2], [2, 3])", true, Ty::NUMBER)] + #[case::std_set_diff("std.setDiff([1, 2, 3], [2])", true, Ty::NUMBER)] + #[case::std_filter("std.filter(function(x) true, [1, 2, 3])", false, Ty::NUMBER)] + #[case::std_slice("std.slice([1, 2, 3], 0, 2, 1)", false, Ty::NUMBER)] + #[case::std_map_typed("std.map(function(x) x + 0, [1, 2, 3])", false, Ty::NUMBER)] + fn test_array_is_set_flag( + #[case] code: &str, + #[case] expected_is_set: bool, + #[case] expected_elem: Ty, + ) { + let (ty, env) = infer_doc(code); + let (elem, is_set) = try_array_with_set(&env, ty).expect("expected array type"); + assert_eq!(is_set, expected_is_set, "is_set mismatch for: {code}"); + assert_eq!(elem, expected_elem, "element type mismatch for: {code}"); + } + + #[test] + fn test_std_reverse_preserves_tuple_type() { + // std.reverse with SameAsArg(0) preserves the input type + // A tuple literal stays a tuple after reverse + let (ty, env) = infer_doc("std.reverse([1, 2, 3])"); + let elems = try_tuple(&env, ty).expect("reverse of tuple should be tuple"); + // All elements should be NUMBER + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/expr/base.rs b/crates/jrsonnet-lsp-inference/src/expr/base.rs new file mode 100644 index 00000000..07893d17 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/expr/base.rs @@ -0,0 +1,453 @@ +use jrsonnet_lsp_import::extract_import_path; +use 
jrsonnet_lsp_scope::var_resolves_to_builtin_std; +use jrsonnet_lsp_types::{FieldDefInterned, FieldVis, NumBounds, ObjectData, Ty, TyData}; +use jrsonnet_rowan_parser::{ + nodes::{BinaryOperatorKind, ExprBase, ImportKindKind, LiteralKind}, + AstNode, +}; + +use super::{ + advanced::{ + infer_array_comp_expr_base_ty, infer_array_expr_base_ty, infer_call_expr_base_ty, + infer_function_expr_base_ty, infer_obj_extend_expr_base_ty, + }, + infer_expr_ty_impl, TypeRecorder, +}; +use crate::{ + env::TypeEnv, + flow::{self, Facts}, + helpers::{extract_var_name_from_expr, infer_stdlib_field_access_ty}, + object::infer_object_type_ty, +}; + +/// Infer the type of a base expression with an optional expected type, returning `Ty`. +/// +/// This is the efficient internal version that works with interned types throughout. +pub(super) fn infer_base_ty( + base: &ExprBase, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { + match base { + // Primitives - return constants directly + ExprBase::ExprLiteral(lit) => { + if let Some(literal) = lit.literal() { + match literal.kind() { + LiteralKind::NullKw => return Ty::NULL, + LiteralKind::TrueKw => return Ty::TRUE, + LiteralKind::FalseKw => return Ty::FALSE, + LiteralKind::SelfKw => { + // `self` refers to the innermost object + return env + .self_ty() + .unwrap_or_else(|| env.store_mut().object_any()); + } + LiteralKind::Dollar => { + // `$` refers to the outermost (root) object + return env + .root_ty() + .unwrap_or_else(|| env.store_mut().object_any()); + } + LiteralKind::SuperKw => { + // `super` refers to the base object being extended + return env + .super_ty() + .unwrap_or_else(|| env.store_mut().object_any()); + } + } + } + Ty::ANY + } + ExprBase::ExprNumber(_) => Ty::NUMBER, + ExprBase::ExprString(_) => Ty::STRING, + ExprBase::ExprError(_) => Ty::NEVER, + + // Variable lookup - use Ty-native lookup + ExprBase::ExprVar(var) => { + if var_resolves_to_builtin_std(var) { + return env.store_mut().object_any(); + 
} + + var.name() + .and_then(|name| name.ident_lit()) + .and_then(|ident| env.lookup(ident.text())) + .unwrap_or(Ty::ANY) + } + + // Parenthesized expression - recurse with expected type + ExprBase::ExprParened(parens) => { + if let Some(inner) = parens.expr() { + return infer_expr_ty_impl(&inner, env, expected, recorder); + } + Ty::ANY + } + + // Import - try to resolve the type from the import cache + ExprBase::ExprImport(import) => { + let Some(kind) = import.import_kind().map(|token| token.kind()) else { + return Ty::ANY; + }; + match kind { + ImportKindKind::ImportKw => { + let Some(path) = extract_import_path(import) else { + return Ty::ANY; + }; + if path.is_empty() { + return Ty::ANY; + } + // Try to resolve the import type, fall back to ANY + env.resolve_import(&path).map_or(Ty::ANY, Ty::from) + } + ImportKindKind::ImportstrKw => Ty::STRING, + ImportKindKind::ImportbinKw => { + let byte_ty = env + .store_mut() + .bounded_number(NumBounds::between(0.0, 255.0)); + env.store_mut().array(byte_ty) + } + } + } + + // Unary operators + ExprBase::ExprUnary(unary) => { + let rhs_ty = unary + .rhs() + .map_or(Ty::ANY, |rhs| infer_expr_ty_impl(&rhs, env, None, recorder)); + if rhs_ty == Ty::NEVER { + return Ty::NEVER; + } + let Some(op_kind) = unary.unary_operator().map(|op| op.kind()) else { + return Ty::ANY; + }; + if op_kind.returns_boolean() { + return Ty::BOOL; + } + if op_kind.returns_number() { + return Ty::NUMBER; + } + Ty::ANY + } + + // Binary operators - handle simple cases directly + ExprBase::ExprBinary(binary) => infer_binary_expr_base_ty(binary, env, recorder), + + // Index access: arr[0], obj["field"], str[0] + ExprBase::ExprIndex(idx) => infer_index_expr_base_ty(idx, env, recorder), + + // Slice: arr[1:3], str[::2] + ExprBase::ExprSlice(slice) => infer_slice_expr_base_ty(slice, env, recorder), + + // If-then-else with flow typing + ExprBase::ExprIfThenElse(if_expr) => { + infer_if_then_else_expr_base_ty(if_expr, env, expected, recorder) + } + + // 
Field access: obj.field or std.fn + ExprBase::ExprField(field) => infer_field_expr_base_ty(field, env, recorder), + + // Function call: fn(args) + ExprBase::ExprCall(call) => infer_call_expr_base_ty(call, env, recorder), + + // Array literal: [a, b, c] + ExprBase::ExprArray(arr) => infer_array_expr_base_ty(arr, env, expected, recorder), + + // Array comprehension: [expr for x in arr] + ExprBase::ExprArrayComp(comp) => infer_array_comp_expr_base_ty(comp, env, recorder), + + // Object literal: { field: value } + ExprBase::ExprObject(obj) => { + infer_object_type_ty(obj.obj_body().as_ref(), env, &mut |expr, env| { + infer_expr_ty_impl(expr, env, None, recorder) + }) + } + + // Function definition: function(x) body + ExprBase::ExprFunction(func) => infer_function_expr_base_ty(func, env, recorder), + + // Object extension: base { ... } + ExprBase::ExprObjExtend(extend) => infer_obj_extend_expr_base_ty(extend, env, recorder), + } +} + +fn infer_binary_expr_base_ty( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + let lhs_ty = binary + .lhs() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if lhs_ty == Ty::NEVER { + return Ty::NEVER; + } + let rhs_ty = binary + .rhs() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if rhs_ty == Ty::NEVER { + return Ty::NEVER; + } + + let Some(op_kind) = binary.binary_operator().map(|op| op.kind()) else { + return Ty::ANY; + }; + + if op_kind == BinaryOperatorKind::Modulo && lhs_ty == Ty::STRING { + return Ty::STRING; + } + if op_kind.returns_number() { + return Ty::NUMBER; + } + if op_kind.returns_boolean() { + return Ty::BOOL; + } + if op_kind == BinaryOperatorKind::Plus { + if lhs_ty == Ty::STRING && rhs_ty == Ty::STRING { + return Ty::STRING; + } + if lhs_ty == Ty::NUMBER && rhs_ty == Ty::NUMBER { + return Ty::NUMBER; + } + let store = env.store_mut(); + let lhs_data = store.get(lhs_ty); + let rhs_data = store.get(rhs_ty); + return 
match (&lhs_data, &rhs_data) { + (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. }) => { + let elem_union = store.union(vec![*l, *r]); + store.array(elem_union) + } + (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { + let mut elems = l.clone(); + elems.extend(r.iter().copied()); + store.tuple(elems) + } + (TyData::Object(left_obj), TyData::Object(right_obj)) => { + let merged = ObjectData::merge(left_obj, right_obj); + store.object(merged) + } + _ => Ty::NUMBER, + }; + } + if op_kind.is_logical_short_circuit() { + return env.store_mut().union(vec![lhs_ty, rhs_ty]); + } + Ty::ANY +} + +fn infer_index_expr_base_ty( + idx: &jrsonnet_rowan_parser::nodes::ExprIndex, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + let base_ty = idx + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if base_ty == Ty::NEVER { + return Ty::NEVER; + } + let mut index_literal = None; + if let Some(index_expr) = idx.index() { + let idx_ty = infer_expr_ty_impl(&index_expr, env, None, recorder); + if idx_ty == Ty::NEVER { + return Ty::NEVER; + } + index_literal = extract_string_literal(&index_expr); + } + if base_ty == Ty::STRING { + return Ty::STRING; + } + let store = env.store_mut(); + match store.get(base_ty) { + TyData::Array { elem, .. 
} => elem, + TyData::Tuple { ref elems } => { + let elems_copy: Vec = elems.clone(); + store.union(elems_copy) + } + TyData::Object(_) | TyData::Union(_) => { + if let Some(field_name) = index_literal.as_deref() { + return object_field_ty(base_ty, field_name, store).unwrap_or(Ty::ANY); + } + Ty::ANY + } + _ => Ty::ANY, + } +} + +fn object_field_ty( + ty: Ty, + field_name: &str, + store: &mut jrsonnet_lsp_types::MutStore, +) -> Option { + match store.get(ty) { + TyData::Object(obj) => obj + .fields + .iter() + .find(|(name, _)| name == field_name) + .map(|(_, field)| field.ty) + .or_else(|| obj.has_unknown.then_some(Ty::ANY)), + TyData::Union(types) => { + let field_types = types + .into_iter() + .filter_map(|variant| object_field_ty(variant, field_name, store)) + .collect::>(); + if field_types.is_empty() { + None + } else { + Some(store.union(field_types)) + } + } + _ => None, + } +} + +fn extract_string_literal(expr: &jrsonnet_rowan_parser::nodes::Expr) -> Option { + let base = expr.expr_base()?; + let ExprBase::ExprString(s) = base else { + return None; + }; + let text = s.syntax().first_token()?.text().to_string(); + if (text.starts_with('"') && text.ends_with('"')) + || (text.starts_with('\'') && text.ends_with('\'')) + { + return Some(text[1..text.len() - 1].to_string()); + } + None +} + +fn infer_slice_expr_base_ty( + slice: &jrsonnet_rowan_parser::nodes::ExprSlice, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + let base_ty = slice + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if base_ty == Ty::NEVER { + return Ty::NEVER; + } + let store = env.store_mut(); + match store.get(base_ty) { + TyData::Array { .. 
} => base_ty, + TyData::Tuple { ref elems } => { + let elems_copy: Vec = elems.clone(); + let elem_union = store.union(elems_copy); + store.array(elem_union) + } + _ if base_ty == Ty::STRING => Ty::STRING, + _ => Ty::ANY, + } +} + +fn infer_if_then_else_expr_base_ty( + if_expr: &jrsonnet_rowan_parser::nodes::ExprIfThenElse, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { + let facts = if let Some(cond) = if_expr.cond() { + let cond_ty = infer_expr_ty_impl(&cond, env, None, recorder); + if cond_ty == Ty::NEVER { + return Ty::NEVER; + } + flow::extract_facts(&cond) + } else { + Facts::new() + }; + + let then_ty = if_expr.then().map_or(Ty::ANY, |then_clause| { + then_clause.expr().map_or(Ty::ANY, |then_expr| { + env.push_scope(); + for (var_name, fact) in facts.iter() { + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); + env.define_ty(var_name.clone(), narrowed_ty); + } + let ty = infer_expr_ty_impl(&then_expr, env, expected, recorder); + env.pop_scope(); + ty + }) + }); + + let else_ty = if_expr.else_().map_or(Ty::ANY, |else_clause| { + else_clause.expr().map_or(Ty::ANY, |else_expr| { + env.push_scope(); + for (var_name, fact) in facts.iter() { + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + let widened_ty = fact.apply_negated(current_ty, env.store_mut()); + env.define_ty(var_name.clone(), widened_ty); + } + let ty = infer_expr_ty_impl(&else_expr, env, expected, recorder); + env.pop_scope(); + ty + }) + }); + + env.store_mut().union(vec![then_ty, else_ty]) +} + +fn infer_field_expr_base_ty( + field: &jrsonnet_rowan_parser::nodes::ExprField, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + if let Some(ty) = infer_stdlib_field_access_ty(field, env) { + if let Some(base_expr) = field.base() { + let _ = infer_expr_ty_impl(&base_expr, env, None, recorder); + } + return ty; + } + + if let Some(base_expr) = field.base() { + if let Some(var_name) = 
extract_var_name_from_expr(&base_expr) { + if env.is_tracked_param(&var_name) { + let field_name = field + .field() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()); + if let Some(fn_name) = field_name { + let obj_data = ObjectData { + fields: vec![( + fn_name, + FieldDefInterned { + ty: Ty::ANY, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }; + let constraint = env.store_mut().object(obj_data); + env.add_constraint_ty(&var_name, constraint); + } else { + let constraint = env.store_mut().object_any(); + env.add_constraint_ty(&var_name, constraint); + } + } + } + } + + let base_ty = field + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if base_ty == Ty::NEVER { + return Ty::NEVER; + } + if base_ty == Ty::STRING { + return Ty::STRING; + } + + let field_name = field + .field() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()); + + let Some(field_name) = field_name.as_deref() else { + return Ty::ANY; + }; + let store = env.store_mut(); + if let Some(ty) = object_field_ty(base_ty, field_name, store) { + return ty; + } + + Ty::ANY +} diff --git a/crates/jrsonnet-lsp-inference/src/expr/core.rs b/crates/jrsonnet-lsp-inference/src/expr/core.rs new file mode 100644 index 00000000..86de4da7 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/expr/core.rs @@ -0,0 +1,378 @@ +//! Expression type inference. + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_types::{FunctionData, ParamInterned, ReturnSpec, Ty, TyData}; +use jrsonnet_rowan_parser::{nodes::Bind, AstNode}; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use super::base::infer_base_ty; +use crate::{ + env::TypeEnv, + flow::{self, Facts}, + helpers::extract_params_with_default_types_ty, +}; + +/// Apply type facts to the environment, narrowing variable types. +/// +/// This is used to apply facts extracted from assert statements and +/// type guard conditions to narrow types for subsequent code. 
+fn apply_facts_to_env(facts: &Facts, env: &mut TypeEnv) { + for (var_name, fact) in facts.iter() { + // Get the current type of the variable + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + // Apply the fact to narrow the type + let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); + // Update the environment with the narrowed type + env.define_ty(var_name.clone(), narrowed_ty); + } +} + +pub(crate) trait TypeRecorder { + fn record(&mut self, range: TextRange, ty: Ty); +} + +struct NoopRecorder; + +impl TypeRecorder for NoopRecorder { + fn record(&mut self, _range: TextRange, _ty: Ty) {} +} + +impl TypeRecorder for FxHashMap { + fn record(&mut self, range: TextRange, ty: Ty) { + self.insert(range, ty); + } +} + +fn record_expr_and_base( + recorder: &mut R, + expr: &jrsonnet_rowan_parser::nodes::Expr, + ty: Ty, +) { + recorder.record(expr.syntax().text_range(), ty); + if let Some(base) = expr.expr_base() { + recorder.record(base.syntax().text_range(), ty); + } +} + +/// Infer the type of a document's root expression, returning an interned `Ty` and the environment. +/// +/// This is useful for tests that need to inspect the type structure using `TyData`. +#[must_use] +pub fn infer_document_type_ty(document: &Document) -> (Ty, TypeEnv) { + let ast = document.ast(); + let mut env = TypeEnv::new_default(); + + let ty = ast + .expr() + .map_or(Ty::ANY, |expr| infer_expr_ty(&expr, &mut env)); + + (ty, env) +} + +/// Infer the type of an expression, returning an interned `Ty`. +pub fn infer_expr_ty(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> Ty { + infer_expr_ty_with_expected(expr, env, None) +} + +pub(crate) fn infer_expr_ty_and_record( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, + recorder: &mut FxHashMap, +) -> Ty { + infer_expr_ty_impl(expr, env, expected, recorder) +} + +/// Infer the type of an expression with an optional expected type, returning `Ty`. 
+/// +/// This is the efficient internal version that works with interned types throughout. +pub fn infer_expr_ty_with_expected( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, +) -> Ty { + let mut recorder = NoopRecorder; + infer_expr_ty_impl(expr, env, expected, &mut recorder) +} + +pub(crate) fn infer_expr_ty_impl( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { + // First, handle local bindings and assert statements that may precede the expression + for stmt in expr.stmts() { + match stmt { + jrsonnet_rowan_parser::nodes::Stmt::StmtLocal(stmt_local) => { + for bind in stmt_local.binds() { + infer_bind_type_ty(&bind, env, recorder); + } + } + jrsonnet_rowan_parser::nodes::Stmt::StmtAssert(stmt_assert) => { + // Extract type facts from assert conditions and apply them + if let Some(assertion) = stmt_assert.assertion() { + if let Some(cond) = assertion.condition() { + let facts = flow::extract_facts(&cond); + apply_facts_to_env(&facts, env); + } + } + } + } + } + + // Get the base expression type + let ty = expr.expr_base().map_or(Ty::ANY, |base| { + infer_base_ty(&base, env, expected, recorder) + }); + record_expr_and_base(recorder, expr, ty); + ty +} + +/// Infer types from a bind (local variable definition) using interned types. 
+fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: &mut R) { + match bind { + Bind::BindDestruct(bd) => { + let ty = bd.value().map_or(Ty::ANY, |value| { + infer_expr_ty_impl(&value, env, None, recorder) + }); + recorder.record(bd.syntax().text_range(), ty); + if let Some(assign_token) = bd.assign_token() { + recorder.record(assign_token.text_range(), ty); + } + if let Some(destruct) = bd.into() { + record_destruct_binding_types_ty(&destruct, ty, env, recorder); + bind_destruct_with_type_ty(&destruct, ty, env); + } + } + Bind::BindFunction(bf) => { + let Some(name_node) = bf.name() else { + return; + }; + let Some(ident) = name_node.ident_lit() else { + return; + }; + let name = ident.text().to_string(); + let params_desc = bf.params(); + let params = params_desc + .as_ref() + .map(|p| extract_params_with_default_types_ty(p, env)) + .unwrap_or_default(); + + // Install a provisional function first so recursive self-calls can resolve. + let provisional_func = FunctionData { + params: params.clone(), + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }; + let provisional_ty = env.store_mut().intern(TyData::Function(provisional_func)); + env.define_ty(name.clone(), provisional_ty); + + let (return_ty, param_constraints) = if env.can_infer_function_body() { + bf.value().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = + params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr_ty_impl(&body, env, None, recorder); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + }, + ) + } else { + (Ty::ANY, FxHashMap::default()) + }; + + let final_params: Vec = params + .into_iter() + .map(|param| { + let mut narrowed_ty = param.ty; + if let Some(constraints) = 
param_constraints.get(¶m.name) { + for constraint_ty in constraints { + narrowed_ty = env.store_mut().narrow(narrowed_ty, *constraint_ty); + } + } + ParamInterned { + name: param.name, + ty: narrowed_ty, + has_default: param.has_default, + } + }) + .collect(); + + let final_func = FunctionData { + params: final_params.clone(), + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }; + let final_ty = env.store_mut().intern(TyData::Function(final_func)); + recorder.record(bf.syntax().text_range(), final_ty); + recorder.record(name_node.syntax().text_range(), final_ty); + if let Some(assign_token) = bf.assign_token() { + recorder.record(assign_token.text_range(), final_ty); + } + if let Some(params_desc) = params_desc { + recorder.record(params_desc.syntax().text_range(), final_ty); + for (param, param_ty) in params_desc.params().zip(final_params.iter()) { + recorder.record(param.syntax().text_range(), param_ty.ty); + if let Some(destruct) = param.destruct() { + recorder.record(destruct.syntax().text_range(), param_ty.ty); + } + if let Some(assign_token) = param.assign_token() { + recorder.record(assign_token.text_range(), param_ty.ty); + } + } + } + env.define_ty(name, final_ty); + } + } +} + +/// Bind a destructuring pattern with an interned type. +/// +/// This is used for comprehension variables where we know the element type +/// from the iterator expression. 
+pub(crate) fn bind_destruct_with_type_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &mut TypeEnv, +) { + use jrsonnet_rowan_parser::nodes::Destruct; + + match destruct { + Destruct::DestructFull(full) => { + let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { + return; + }; + env.define_ty(ident.text().to_string(), ty); + } + Destruct::DestructArray(arr) => { + let elem_types = extract_array_element_types_ty(ty, env); + let mut elem_index = 0usize; + for part in arr.destruct_array_parts() { + let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) = + part + else { + continue; + }; + let Some(inner) = elem.destruct() else { + elem_index += 1; + continue; + }; + let elem_ty = array_destruct_element_ty(&elem_types, elem_index); + bind_destruct_with_type_ty(&inner, elem_ty, env); + elem_index += 1; + } + } + Destruct::DestructObject(obj) => { + for field in obj.destruct_object_fields() { + let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let field_ty = lookup_destruct_field_type_ty(field_name.text(), ty, env); + let Some(inner) = field.destruct() else { + env.define_ty(field_name.text().to_string(), field_ty); + continue; + }; + bind_destruct_with_type_ty(&inner, field_ty, env); + } + } + Destruct::DestructSkip(_) => {} + } +} + +fn record_destruct_binding_types_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &TypeEnv, + recorder: &mut R, +) { + use jrsonnet_rowan_parser::nodes::Destruct; + + match destruct { + Destruct::DestructFull(full) => { + let Some(name) = full.name() else { + return; + }; + recorder.record(name.syntax().text_range(), ty); + } + Destruct::DestructArray(arr) => { + let elem_types = extract_array_element_types_ty(ty, env); + let mut elem_index = 0usize; + for part in arr.destruct_array_parts() { + let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) = + part + else { + continue; + 
}; + let Some(inner) = elem.destruct() else { + elem_index += 1; + continue; + }; + let elem_ty = array_destruct_element_ty(&elem_types, elem_index); + record_destruct_binding_types_ty(&inner, elem_ty, env, recorder); + elem_index += 1; + } + } + Destruct::DestructObject(obj) => { + for field in obj.destruct_object_fields() { + let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let field_ty = lookup_destruct_field_type_ty(field_name.text(), ty, env); + let Some(inner) = field.destruct() else { + recorder.record(field_name.text_range(), field_ty); + continue; + }; + record_destruct_binding_types_ty(&inner, field_ty, env, recorder); + } + } + Destruct::DestructSkip(_) => {} + } +} + +/// Extract element types from an array or tuple type (Ty version). +fn extract_array_element_types_ty(ty: Ty, env: &TypeEnv) -> Vec { + let store = env.store(); + match store.get(ty) { + TyData::Tuple { ref elems } => elems.clone(), + TyData::Array { elem, .. } => vec![elem], + _ => vec![], + } +} + +fn array_destruct_element_ty(elem_types: &[Ty], index: usize) -> Ty { + match elem_types { + [] => Ty::ANY, + [elem] => *elem, + _ => elem_types.get(index).copied().unwrap_or(Ty::ANY), + } +} + +/// Look up the type for a destructured field from an object type (Ty version). +fn lookup_destruct_field_type_ty(field_name: &str, ty: Ty, env: &TypeEnv) -> Ty { + let store = env.store(); + let TyData::Object(ref obj_data) = store.get(ty) else { + return Ty::ANY; + }; + + obj_data.get_field(field_name).map_or(Ty::ANY, |fd| fd.ty) +} + +/// Check if an expression is guaranteed to diverge (never return). +/// +/// An expression diverges if it has type `Never` - meaning it always +/// throws an error. This is used for unreachable code detection. 
+pub fn is_divergent(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> bool { + infer_expr_ty(expr, env).is_never() +} diff --git a/crates/jrsonnet-lsp-inference/src/expr/mod.rs b/crates/jrsonnet-lsp-inference/src/expr/mod.rs new file mode 100644 index 00000000..dcdc09d7 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/expr/mod.rs @@ -0,0 +1,10 @@ +//! Expression type inference. + +mod advanced; +mod base; +mod core; + +pub(crate) use core::{ + bind_destruct_with_type_ty, infer_expr_ty_and_record, infer_expr_ty_impl, TypeRecorder, +}; +pub use core::{infer_document_type_ty, infer_expr_ty, infer_expr_ty_with_expected, is_divergent}; diff --git a/crates/jrsonnet-lsp-inference/src/flow/extract.rs b/crates/jrsonnet-lsp-inference/src/flow/extract.rs new file mode 100644 index 00000000..ec7a0c5b --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/flow/extract.rs @@ -0,0 +1,1314 @@ +use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, var_resolves_to_builtin_std}; +use jrsonnet_rowan_parser::{ + nodes::{ArgsDesc, BinaryOperatorKind, Expr, ExprBase, ExprCall, LiteralKind}, + AstNode, AstToken, +}; + +use super::{Fact, FactRepr, Facts, PrimFact, Totality}; + +pub fn extract_facts(cond: &Expr) -> Facts { + let mut facts = Facts::new(); + extract_facts_into(cond, &mut facts); + facts +} + +fn extract_facts_into(cond: &Expr, facts: &mut Facts) { + let Some(base) = cond.expr_base() else { + return; + }; + + match base { + ExprBase::ExprCall(call) => { + // Could be std.isX(var) or std.objectHas(var, "field") + if let Some((var_name, fact)) = check_std_call(&call) { + facts.add(var_name, fact); + } + } + ExprBase::ExprBinary(binary) => { + extract_binary_facts(&binary, facts); + } + ExprBase::ExprUnary(unary) => { + let Some(op) = unary.unary_operator() else { + return; + }; + if !op.kind().is_logical_not() { + return; + } + // !expr - extract facts from inner and negate + if let Some(inner) = unary.rhs() { + let inner_facts = extract_facts(&inner); + let 
negated = inner_facts.negate(); + for (var_name, fact) in negated.facts { + facts.add(var_name, fact); + } + } + } + ExprBase::ExprParened(parens) => { + if let Some(inner) = parens.expr() { + extract_facts_into(&inner, facts); + } + } + _ => {} + } +} + +/// Extract facts from a binary expression. +fn extract_binary_facts(binary: &jrsonnet_rowan_parser::nodes::ExprBinary, facts: &mut Facts) { + let (Some(lhs), Some(rhs), Some(op)) = (binary.lhs(), binary.rhs(), binary.binary_operator()) + else { + return; + }; + + let op_kind = op.kind(); + + if op_kind == BinaryOperatorKind::Eq { + // var == null or null == var + if let Some((var_name, fact)) = check_null_equality(&lhs, &rhs, Totality::Total) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_null_equality(&rhs, &lhs, Totality::Total) { + facts.add(var_name, fact); + } + // var == "literal" or "literal" == var + if let Some((var_name, fact)) = check_literal_string_equality(&lhs, &rhs) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_literal_string_equality(&rhs, &lhs) { + facts.add(var_name, fact); + } + // var == true/false or true/false == var + if let Some((var_name, fact)) = check_literal_bool_equality(&lhs, &rhs) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_literal_bool_equality(&rhs, &lhs) { + facts.add(var_name, fact); + } + // std.type(x) == "typename" + if let Some((var_name, fact)) = check_std_type_comparison(binary, &rhs) { + facts.add(var_name, fact); + } + // std.length(x) == n + if let Some((var_name, fact)) = check_std_length_comparison(binary, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind == BinaryOperatorKind::Ne { + // var != null - create fact and negate it + if let Some((var_name, fact)) = check_null_equality(&lhs, &rhs, Totality::Total) { + facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_null_equality(&rhs, &lhs, Totality::Total) { + facts.add(var_name, 
!fact); + } + // var != "literal" or "literal" != var + if let Some((var_name, fact)) = check_literal_string_equality(&lhs, &rhs) { + facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_literal_string_equality(&rhs, &lhs) { + facts.add(var_name, !fact); + } + // var != true/false or true/false != var + if let Some((var_name, fact)) = check_literal_bool_equality(&lhs, &rhs) { + facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_literal_bool_equality(&rhs, &lhs) { + facts.add(var_name, !fact); + } + // std.length(x) != 0 means non-empty + if let Some((var_name, fact)) = check_std_length_not_zero(binary, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind == BinaryOperatorKind::Gt { + // std.length(x) > n means length >= n+1 + if let Some((var_name, fact)) = check_std_length_greater(binary, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind == BinaryOperatorKind::Ge { + // std.length(x) >= n means length >= n + if let Some((var_name, fact)) = check_std_length_greater_eq(binary, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind.is_membership() { + // "field" in obj + if let Some((var_name, fact)) = check_in_operator(&lhs, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind.is_logical_and() { + // a && b - extract facts from both sides + extract_facts_into(&lhs, facts); + extract_facts_into(&rhs, facts); + return; + } + + if op_kind.is_logical_or() { + // a || b - only keep facts that are in both + let lhs_facts = extract_facts(&lhs); + let rhs_facts = extract_facts(&rhs); + let combined = lhs_facts.or_combine(rhs_facts); + for (var_name, fact) in combined.facts { + facts.add(var_name, fact); + } + } +} + +/// Result of extracting a std.methodName call. +struct StdMethodCall { + method_name: String, + args: ArgsDesc, +} + +/// Reference to a variable with optional field path. 
+/// +/// For `x.a.b`, this stores: +/// - `var_name = "x"` +/// - `path = ["b", "a"]` (innermost-to-outermost field order) +struct VarRef { + var_name: String, + path: Vec, +} + +/// Check if an `ExprCall` is a call to std.methodName and extract the method name and args. +fn extract_std_method_call(call: &ExprCall) -> Option { + let callee = call.callee()?; + let ExprBase::ExprField(field) = callee.expr_base()? else { + return None; + }; + + let base = field.base()?; + if !expr_resolves_to_builtin_std(&base) { + return None; + } + + let method_name = field.field()?.ident_lit()?.text().to_string(); + let args = call.args_desc()?; + Some(StdMethodCall { method_name, args }) +} + +/// Extract a std method call from the LHS of a binary expression. +fn extract_std_method_from_binary( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, +) -> Option { + let lhs = binary.lhs()?; + let ExprBase::ExprCall(call) = lhs.expr_base()? else { + return None; + }; + extract_std_method_call(&call) +} + +fn with_var_path(path: Vec, fact: Fact) -> Fact { + path.into_iter() + .fold(fact, |inner, field| Fact::has_field_typed(field, inner)) +} + +fn bind_fact_to_expr(expr: &Expr, fact: Fact) -> Option<(String, Fact)> { + let var_ref = extract_var_ref(expr)?; + let fact = with_var_path(var_ref.path, fact); + Some((var_ref.var_name, fact)) +} + +/// Check for std.isX(var) or std.objectHas(var, "field") pattern in an `ExprCall`. 
+fn check_std_call(call: &ExprCall) -> Option<(String, Fact)> { + let std_call = extract_std_method_call(call)?; + + // Try std.isX patterns first + if let Some(fact) = unary_std_fn_fact(&std_call.method_name) { + let arg_expr = extract_single_arg_expr(&std_call.args)?; + return bind_fact_to_expr(&arg_expr, fact); + } + + // Try std.objectHas(obj, "field") + if std_call.method_name == "objectHas" + || std_call.method_name == "objectHasAll" + || std_call.method_name == "objectHasEx" + { + let arg_list: Vec<_> = std_call.args.args().collect(); + let ([obj_arg, field_arg] | [obj_arg, field_arg, ..]) = arg_list.as_slice() else { + return None; + }; + let obj_expr = obj_arg.expr()?; + let field_name = extract_string_literal(&field_arg.expr()?)?; + return bind_fact_to_expr(&obj_expr, Fact::has_field(field_name)); + } + + // Try std.all(std.map(predicate, arr)) pattern + if std_call.method_name == "all" { + return check_higher_order_predicate(&std_call.args); + } + + None +} + +/// Check for std.all(std.map(predicate, arr)) pattern. +/// +/// When `std.all(std.map(std.isNumber, arr))` is true, we know all elements +/// of `arr` are numbers, so we can narrow `arr` to `Array`. +fn check_higher_order_predicate(all_args: &ArgsDesc) -> Option<(String, Fact)> { + let arg_list: Vec<_> = all_args.args().collect(); + let [map_arg] = arg_list.as_slice() else { + return None; + }; + + // The argument should be std.map(predicate, arr) + let map_expr = map_arg.expr()?; + let ExprBase::ExprCall(map_call) = map_expr.expr_base()? 
else { + return None; + }; + + let map_std_call = extract_std_method_call(&map_call)?; + if map_std_call.method_name != "map" { + return None; + } + + let map_args: Vec<_> = map_std_call.args.args().collect(); + let [pred_arg, arr_arg] = map_args.as_slice() else { + return None; + }; + + // First arg is the predicate (e.g., std.isNumber) + let pred_expr = pred_arg.expr()?; + let elem_fact = extract_predicate_element_fact(&pred_expr)?; + + // Second arg is the array variable + let arr_expr = arr_arg.expr()?; + bind_fact_to_expr(&arr_expr, elem_fact) +} + +/// Extract the element type fact from a predicate expression. +/// +/// For `std.isNumber`, returns `Fact::array_elem_number(Totality::Total)`. +fn extract_predicate_element_fact(pred: &Expr) -> Option { + extract_std_predicate_element_fact(pred) + .or_else(|| extract_function_predicate_element_fact(pred)) +} + +/// Extract an array-element narrowing fact from a predicate expression. +/// +/// This supports stdlib predicates (`std.isNumber`) and simple single-parameter +/// function predicates (`function(x) x == null || std.isNumber(x)`). +#[must_use] +pub fn extract_array_predicate_fact(pred: &Expr) -> Option { + extract_predicate_element_fact(pred) +} + +fn extract_std_predicate_element_fact(pred: &Expr) -> Option { + use jrsonnet_std_sig::{get_flow_typing, NarrowsTo, Totality as SigTotality}; + + // Check if it's a std.isX function reference + let ExprBase::ExprField(field) = pred.expr_base()? 
else { + return None; + }; + + let base = field.base()?; + if !expr_resolves_to_builtin_std(&base) { + return None; + } + + let method_ident = field.field()?.ident_lit()?; + let method_name = method_ident.text(); + + // Look up flow typing info from spec + let ft = get_flow_typing(method_name)?; + let totality = match ft.totality { + SigTotality::Total => Totality::Total, + SigTotality::Partial => Totality::Partial, + }; + + // Map narrowing type to array element fact + Some(match ft.narrows_to { + NarrowsTo::Number => Fact::array_elem_number(totality), + NarrowsTo::String => Fact::array_elem_string(totality), + NarrowsTo::Bool => Fact::array_elem_bool(totality), + NarrowsTo::Array => Fact::array_elem_array(totality), + NarrowsTo::Object => Fact::array_elem_object(totality), + NarrowsTo::Function => Fact::array_elem_function(totality), + NarrowsTo::Null => Fact { + repr: FactRepr::ArrayElemType(PrimFact::Null, totality), + }, + }) +} + +fn extract_function_predicate_element_fact(pred: &Expr) -> Option { + let ExprBase::ExprFunction(function) = pred.expr_base()? 
else { + return None; + }; + let params_desc = function.params_desc()?; + let params: Vec<_> = params_desc.params().collect(); + let [param] = params.as_slice() else { + return None; + }; + if param.assign_token().is_some() || param.expr().is_some() { + return None; + } + + let destruct = param.destruct()?; + let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct else { + return None; + }; + let param_name = full.name()?.ident_lit()?.text().to_string(); + let body = function.expr()?; + let facts = extract_facts(&body); + let param_fact = facts.get(¶m_name)?; + lift_fact_to_array_elements(param_fact) +} + +fn lift_fact_to_array_elements(fact: &Fact) -> Option { + Some(Fact { + repr: lift_fact_repr_to_array_elements(&fact.repr)?, + }) +} + +fn lift_fact_repr_to_array_elements(repr: &FactRepr) -> Option { + match repr { + FactRepr::Prim(prim, totality) => Some(FactRepr::ArrayElemType(*prim, *totality)), + FactRepr::And(lhs, rhs) => Some(FactRepr::And( + Box::new(lift_fact_repr_to_array_elements(lhs)?), + Box::new(lift_fact_repr_to_array_elements(rhs)?), + )), + FactRepr::Or(lhs, rhs) => Some(FactRepr::Or( + Box::new(lift_fact_repr_to_array_elements(lhs)?), + Box::new(lift_fact_repr_to_array_elements(rhs)?), + )), + FactRepr::Not(inner) => Some(FactRepr::Not(Box::new(lift_fact_repr_to_array_elements( + inner, + )?))), + _ => None, + } +} + +/// Map std function names to type facts using the spec. 
+fn unary_std_fn_fact(fn_name: &str) -> Option { + use jrsonnet_std_sig::{get_flow_typing, NarrowsTo, Totality as SigTotality}; + + let ft = get_flow_typing(fn_name)?; + let totality = match ft.totality { + SigTotality::Total => Totality::Total, + SigTotality::Partial => Totality::Partial, + }; + + Some(match ft.narrows_to { + NarrowsTo::Number => Fact::number(totality), + NarrowsTo::String => Fact::string(totality), + NarrowsTo::Bool => Fact::boolean(), + NarrowsTo::Array => Fact::array(totality), + NarrowsTo::Object => Fact::object(totality), + NarrowsTo::Function => Fact::function(), + NarrowsTo::Null => Fact::null(), + }) +} + +/// Check for var == null pattern. +fn check_null_equality( + var_side: &Expr, + null_side: &Expr, + totality: Totality, +) -> Option<(String, Fact)> { + if !is_null_literal(null_side) { + return None; + } + let fact = if totality == Totality::Partial { + Fact { + repr: FactRepr::Prim(PrimFact::Null, Totality::Partial), + } + } else { + Fact::null() + }; + bind_fact_to_expr(var_side, fact) +} + +/// Check for var == "literal" pattern. +fn check_literal_string_equality(var_side: &Expr, literal_side: &Expr) -> Option<(String, Fact)> { + let literal_value = extract_string_literal(literal_side)?; + bind_fact_to_expr(var_side, Fact::literal_string(literal_value)) +} + +/// Check for var == true/false pattern. +fn check_literal_bool_equality(var_side: &Expr, literal_side: &Expr) -> Option<(String, Fact)> { + let literal_value = extract_bool_literal(literal_side)?; + bind_fact_to_expr(var_side, Fact::literal_bool(literal_value)) +} + +/// Check for "field" in obj pattern. +fn check_in_operator(lhs: &Expr, rhs: &Expr) -> Option<(String, Fact)> { + let field_name = extract_string_literal(lhs)?; + bind_fact_to_expr(rhs, Fact::has_field(field_name)) +} + +/// Check for std.type(x) == "typename" pattern. 
+fn check_std_type_comparison( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "type" { + return None; + } + + let arg_expr = extract_single_arg_expr(&call.args)?; + let type_str = extract_string_literal(rhs)?; + + let fact = match type_str.as_str() { + "number" => Fact::number(Totality::Total), + "string" => Fact::string(Totality::Total), + "boolean" => Fact::boolean(), + "array" => Fact::array(Totality::Total), + "object" => Fact::object(Totality::Total), + "function" => Fact::function(), + "null" => Fact::null(), + _ => return None, + }; + + bind_fact_to_expr(&arg_expr, fact) +} + +fn non_negative_integral_usize(value: f64) -> Option { + if !(value.is_finite() && value >= 0.0 && value.fract() == 0.0) { + return None; + } + format!("{value:.0}").parse().ok() +} + +/// Check for std.length(x) == n pattern. +fn check_std_length_comparison( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "length" { + return None; + } + + let arg_expr = extract_single_arg_expr(&call.args)?; + let len = extract_number_literal(rhs)?; + + // Length must be a non-negative integer + if len < 0.0 || len.fract() != 0.0 { + return None; + } + + let len = non_negative_integral_usize(len)?; + bind_fact_to_expr(&arg_expr, Fact::has_len(len)) +} + +/// Check for std.length(x) != 0 pattern (non-empty). 
+fn check_std_length_not_zero( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "length" { + return None; + } + + let arg_expr = extract_single_arg_expr(&call.args)?; + let len = extract_number_literal(rhs)?; + + // Only handle != 0 + if len != 0.0 { + return None; + } + + bind_fact_to_expr(&arg_expr, Fact::min_len(1)) +} + +/// Check for std.length(x) > n pattern. +fn check_std_length_greater( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "length" { + return None; + } + + let arg_expr = extract_single_arg_expr(&call.args)?; + let len = extract_number_literal(rhs)?; + + // Length must be a non-negative integer + if len < 0.0 || len.fract() != 0.0 { + return None; + } + + // std.length(x) > n means length >= n+1 + let len = non_negative_integral_usize(len)?; + bind_fact_to_expr(&arg_expr, Fact::min_len(len + 1)) +} + +/// Check for std.length(x) >= n pattern. +fn check_std_length_greater_eq( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "length" { + return None; + } + + let arg_expr = extract_single_arg_expr(&call.args)?; + let len = extract_number_literal(rhs)?; + + // Length must be a non-negative integer + if len < 0.0 || len.fract() != 0.0 { + return None; + } + + let len = non_negative_integral_usize(len)?; + bind_fact_to_expr(&arg_expr, Fact::min_len(len)) +} + +/// Extract a number literal value from an expression. 
+fn extract_number_literal(expr: &Expr) -> Option { + let base = expr.expr_base()?; + let ExprBase::ExprNumber(num) = base else { + return None; + }; + + let number = num.number()?; + let text = number.syntax().text().to_string(); + text.parse().ok() +} + +/// Extract single argument expression from a single-argument function call. +fn extract_single_arg_expr(args: &ArgsDesc) -> Option { + let arg_list: Vec<_> = args.args().collect(); + let [arg] = arg_list.as_slice() else { + return None; + }; + arg.expr() +} + +/// Extract a variable with optional field path. +/// +/// Supports: +/// - `x` -> `VarRef { x, [] }` +/// - `x.a` -> `VarRef { x, ["a"] }` +/// - `x.a.b` -> `VarRef { x, ["b", "a"] }` +/// - `x["a"]` -> `VarRef { x, ["a"] }` +fn extract_var_ref(expr: &Expr) -> Option { + let mut current = expr.clone(); + let mut path = Vec::new(); + + loop { + let base = current.expr_base()?; + match base { + ExprBase::ExprVar(var) => { + if var_resolves_to_builtin_std(&var) { + return None; + } + let name = var.name()?.ident_lit()?.text().to_string(); + return Some(VarRef { + var_name: name, + path, + }); + } + ExprBase::ExprField(field) => { + let field_name = field.field()?.ident_lit()?.text().to_string(); + path.push(field_name); + current = field.base()?; + } + ExprBase::ExprIndex(index) => { + let field_name = extract_string_literal(&index.index()?)?; + path.push(field_name); + current = index.base()?; + } + ExprBase::ExprParened(parens) => { + current = parens.expr()?; + } + _ => return None, + } + } +} + +/// Extract a string literal value from an expression. 
fn extract_string_literal(expr: &Expr) -> Option<String> {
    let base = expr.expr_base()?;
    let ExprBase::ExprString(s) = base else {
        return None;
    };

    // Get the raw token text, quotes included. Escape sequences are
    // returned verbatim (not decoded); block strings (`|||`) are not
    // recognized — NOTE(review): confirm whether decoding is needed here.
    let text = s.syntax().first_token()?.text().to_string();

    // Require at least the two quote characters: a lone `"` token (e.g.
    // from an unterminated string while the user is typing) would
    // otherwise satisfy both starts_with and ends_with and make the
    // slice below panic with an out-of-bounds range.
    if text.len() < 2 {
        return None;
    }

    // Remove quotes - handle both single and double quotes
    if (text.starts_with('"') && text.ends_with('"'))
        || (text.starts_with('\'') && text.ends_with('\''))
    {
        Some(text[1..text.len() - 1].to_string())
    } else {
        None
    }
}

/// Check if an expression is a null literal.
fn is_null_literal(expr: &Expr) -> bool {
    let Some(base) = expr.expr_base() else {
        return false;
    };
    if let ExprBase::ExprLiteral(lit) = base {
        lit.literal()
            .is_some_and(|l| l.kind() == LiteralKind::NullKw)
    } else {
        false
    }
}

/// Extract a boolean literal value from an expression.
fn extract_bool_literal(expr: &Expr) -> Option<bool> {
    let base = expr.expr_base()?;
    let ExprBase::ExprLiteral(lit) = base else {
        return None;
    };
    let literal = lit.literal()?;
    match literal.kind() {
        LiteralKind::TrueKw => Some(true),
        LiteralKind::FalseKw => Some(false),
        _ => None,
    }
}

#[cfg(test)]
mod tests {
    use jrsonnet_lsp_document::{DocVersion, Document};
    use jrsonnet_lsp_types::{
        FieldDefInterned, FieldVis, GlobalTyStore, MutStore, ObjectData, Ty, TyData,
    };
    use rstest::rstest;

    use super::*;

    /// Parse Jsonnet code and extract facts.
    fn parse_and_extract(code: &str) -> Facts {
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();
        let expr = ast.expr().expect("should parse");
        extract_facts(&expr)
    }

    /// Create an open object with a single field (test helper).
+ fn object_with_field(name: &str, ty: Ty, required: bool) -> ObjectData { + ObjectData { + fields: vec![( + name.to_string(), + FieldDefInterned { + ty, + required, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + } + } + + #[rstest] + #[case::number_narrows_any(Fact::number(Totality::Total), Ty::ANY, Ty::NUMBER)] + #[case::string_narrows_any(Fact::string(Totality::Total), Ty::ANY, Ty::STRING)] + #[case::null_narrows_any(Fact::null(), Ty::ANY, Ty::NULL)] + #[case::boolean_narrows_any(Fact::boolean(), Ty::ANY, Ty::BOOL)] + fn test_fact_apply_to(#[case] fact: Fact, #[case] input: Ty, #[case] expected: Ty) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let result = fact.apply_to(input, &mut store); + assert_eq!(result, expected); + } + + #[rstest] + #[case::number_removes_from_union( + Fact::number(Totality::Total), + vec![Ty::NUMBER, Ty::STRING], + Ty::STRING + )] + #[case::null_removes_from_union( + Fact::null(), + vec![Ty::NULL, Ty::STRING], + Ty::STRING + )] + fn test_total_fact_negation( + #[case] fact: Fact, + #[case] union_types: Vec, + #[case] expected: Ty, + ) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let input = store.union(union_types); + let result = fact.apply_negated(input, &mut store); + assert_eq!(result, expected); + } + + #[test] + fn test_partial_fact_negation_preserves_type() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::number(Totality::Partial); + let input = store.union(vec![Ty::NUMBER, Ty::STRING]); + let result = fact.apply_negated(input, &mut store); + // Partial facts cannot negate - type unchanged + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(result, expected); + } + + #[test] + fn test_has_field_fact_produces_object_with_field() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::has_field("foo".to_string()); + let result = 
fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("foo", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + fn test_and_fact_intersects_constraints() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let combined = Fact::number(Totality::Total).and(Fact::number(Totality::Total)); + let result = combined.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_or_fact_unions_constraints() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let combined = Fact::number(Totality::Total).or(Fact::string(Totality::Total)); + let result = combined.apply_to(Ty::ANY, &mut store); + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(result, expected); + } + + #[test] + fn test_de_morgan_negated_or_becomes_and() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + // !(number || string) = !number && !string + let combined = Fact::number(Totality::Total) + .or(Fact::string(Totality::Total)) + .negated(); + let input = store.union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + let result = combined.apply_to(input, &mut store); + assert_eq!(result, Ty::BOOL); + } + + #[test] + fn test_double_negation_cancels() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = !(!Fact::number(Totality::Total)); + let result = fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_facts_collection_stores_by_variable() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let mut facts = Facts::new(); + facts.add("x".to_string(), Fact::number(Totality::Total)); + facts.add("y".to_string(), Fact::string(Totality::Total)); + + let x_fact = facts.get("x").expect("x should exist"); + let y_fact = facts.get("y").expect("y should exist"); + let z_fact = facts.get("z"); + + assert_eq!(x_fact.apply_to(Ty::ANY, &mut 
store), Ty::NUMBER); + assert_eq!(y_fact.apply_to(Ty::ANY, &mut store), Ty::STRING); + assert_eq!(z_fact, None); + } + + #[test] + fn test_facts_add_same_variable_combines_with_and() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let mut facts = Facts::new(); + facts.add("x".to_string(), Fact::object(Totality::Total)); + facts.add("x".to_string(), Fact::has_field("foo".to_string())); + + let result = facts + .get("x") + .expect("x should exist") + .apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("foo", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + fn test_facts_and_combine_merges_all_variables() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let mut f1 = Facts::new(); + f1.add("x".to_string(), Fact::number(Totality::Total)); + + let mut f2 = Facts::new(); + f2.add("y".to_string(), Fact::string(Totality::Total)); + + let combined = f1.and_combine(f2); + + let x_result = combined.get("x").expect("x").apply_to(Ty::ANY, &mut store); + let y_result = combined.get("y").expect("y").apply_to(Ty::ANY, &mut store); + assert_eq!(x_result, Ty::NUMBER); + assert_eq!(y_result, Ty::STRING); + } + + #[test] + fn test_facts_or_combine_keeps_only_common_variables() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let mut f1 = Facts::new(); + f1.add("x".to_string(), Fact::number(Totality::Total)); + + let mut f2 = Facts::new(); + f2.add("x".to_string(), Fact::string(Totality::Total)); + f2.add("y".to_string(), Fact::string(Totality::Total)); + + let combined = f1.or_combine(f2); + + let x_result = combined.get("x").expect("x").apply_to(Ty::ANY, &mut store); + let y_exists = combined.get("y"); + + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(x_result, expected); + assert_eq!(y_exists, None); + } + + #[rstest] + #[case::is_number("std.isNumber(x)", Ty::NUMBER)] + #[case::is_string("std.isString(x)", Ty::STRING)] + 
#[case::is_boolean("std.isBoolean(x)", Ty::BOOL)] + fn test_extract_std_is_primitive(#[case] code: &str, #[case] expected: Ty) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(code); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_call_through_alias() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("local s = std; s.isNumber(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_extract_std_call_through_alias_chain() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("local s = std; local t = s; t.isString(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_extract_std_call_shadowed_std_is_not_builtin() { + let facts = parse_and_extract("local std = { isNumber(v): true }; std.isNumber(x)"); + assert_eq!(facts.get("x"), None); + } + + #[test] + fn test_extract_std_is_array() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isArray(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + let expected = store.array(Ty::ANY); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_is_object() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isObject(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + let expected = 
store.object(ObjectData::open()); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_is_function() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isFunction(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert!(matches!(store.get(result), TyData::Function(_))); + } + + #[test] + fn test_extract_eq_null() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("x == null"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NULL); + } + + #[test] + fn test_extract_ne_null() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("x != null"); + let x_fact = facts.get("x").expect("should have fact for x"); + let input = store.union(vec![Ty::NULL, Ty::STRING]); + let result = x_fact.apply_to(input, &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_extract_std_object_has() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#"std.objectHas(obj, "field")"#); + let obj_fact = facts.get("obj").expect("should have fact for obj"); + let result = obj_fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("field", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_object_has_ex() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#"std.objectHasEx(obj, "field", true)"#); + let obj_fact = facts.get("obj").expect("should have fact for obj"); + let result = obj_fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("field", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + 
fn test_extract_in_operator() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#""field" in obj"#); + let obj_fact = facts.get("obj").expect("should have fact for obj"); + let result = obj_fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("field", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_logical_and() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isNumber(x) && std.isString(y)"); + let x_result = facts.get("x").expect("x").apply_to(Ty::ANY, &mut store); + let y_result = facts.get("y").expect("y").apply_to(Ty::ANY, &mut store); + assert_eq!(x_result, Ty::NUMBER); + assert_eq!(y_result, Ty::STRING); + } + + #[test] + fn test_extract_logical_or() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isNumber(x) || std.isString(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_negation() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("!std.isNumber(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let input = store.union(vec![Ty::NUMBER, Ty::STRING]); + let result = x_fact.apply_to(input, &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_extract_std_type_comparison() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#"std.type(x) == "number""#); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn 
test_extract_partial_predicate_cannot_negate() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isInteger(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + + let positive = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(positive, Ty::NUMBER); + + let input = store.union(vec![Ty::NUMBER, Ty::STRING]); + let negative = x_fact.apply_negated(input, &mut store); + // Partial facts cannot negate + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(negative, expected); + } + + #[test] + fn test_extract_std_length_equality() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.length(arr) == 3"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Array with unknown elements becomes a 3-tuple + let input = store.array(Ty::NUMBER); + let result = arr_fact.apply_to(input, &mut store); + let expected = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_length_greater_than_zero() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.length(arr) > 0"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + let array_ty = store.array(Ty::NUMBER); + let tuple_non_empty = store.tuple(vec![Ty::NUMBER]); + let tuple_empty = store.tuple(vec![]); + let mixed = store.union(vec![Ty::NUMBER, array_ty, tuple_non_empty, tuple_empty]); + + let result = arr_fact.apply_to(mixed, &mut store); + let expected = store.union(vec![array_ty, tuple_non_empty]); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_length_not_zero() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.length(arr) != 0"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + 
let tuple_empty = store.tuple(vec![]); + let tuple_non_empty = store.tuple(vec![Ty::STRING]); + let input = store.union(vec![tuple_empty, tuple_non_empty]); + let result = arr_fact.apply_to(input, &mut store); + assert_eq!(result, tuple_non_empty); + } + + #[test] + fn test_extract_std_length_greater_eq_n_narrows_literal_strings() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.length(s) >= 2"); + let s_fact = facts.get("s").expect("should have fact for s"); + + let empty = store.literal_string(String::new()); + let one = store.literal_string("x".to_string()); + let two = store.literal_string("xy".to_string()); + let three = store.literal_string("xyz".to_string()); + let input = store.union(vec![empty, one, two, three]); + + let result = s_fact.apply_to(input, &mut store); + let expected = store.union(vec![two, three]); + assert_eq!(result, expected); + } + + // Higher-order predicate tests + + #[test] + fn test_higher_order_predicate_all_map_is_number() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isNumber, arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Narrows Array to Array + let input = store.array(Ty::ANY); + let result = arr_fact.apply_to(input, &mut store); + let expected = store.array(Ty::NUMBER); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_all_map_is_string() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isString, arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Narrows Array to Array + let input = store.array(Ty::ANY); + let result = arr_fact.apply_to(input, &mut store); + let expected = store.array(Ty::STRING); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_all_map_is_object() 
{ + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isObject, arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Narrows Array to Array + let input = store.array(Ty::ANY); + let result = arr_fact.apply_to(input, &mut store); + let obj_ty = store.object(ObjectData::open()); + let expected = store.array(obj_ty); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_narrows_existing_array() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isNumber, arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Narrows existing array type to Number elements + let elem_union = store.union(vec![Ty::NUMBER, Ty::STRING]); + let input = store.array(elem_union); + let result = arr_fact.apply_to(input, &mut store); + let expected = store.array(Ty::NUMBER); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_with_any_type() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isNumber, x))"); + let x_fact = facts.get("x").expect("should have fact for x"); + + // When applied to Any, narrows to Array + let result = x_fact.apply_to(Ty::ANY, &mut store); + let expected = store.array(Ty::NUMBER); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_with_function_predicate_union() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = + parse_and_extract("std.all(std.map(function(x) x == null || std.isNumber(x), arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + let result = arr_fact.apply_to(Ty::ANY, &mut store); + let elem_union = store.union(vec![Ty::NULL, Ty::NUMBER]); + let expected = store.array(elem_union); + assert_eq!(result, 
expected); + } + + #[rstest] + #[case::is_decimal(Fact::number(Totality::Partial))] + #[case::is_integer(Fact::number(Totality::Partial))] + fn test_partial_predicates_cannot_negate(#[case] fact: Fact) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let input = store.union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + // Partial facts should not remove anything when negated + let result = fact.apply_negated(input, &mut store); + let expected = store.union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + assert_eq!(result, expected); + } + + #[test] + fn test_partial_and_total_combination() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + // Combining partial and total facts + // isNumber (total) AND isInteger (partial) + let total = Fact::number(Totality::Total); + let partial = Fact::number(Totality::Partial); + let combined = total.and(partial); + + // Positive case: narrows to Number + let result = combined.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + + // Negative case: !(total AND partial) = !total OR !partial + // !total widens, !partial preserves, union of both + let input = store.union(vec![Ty::NUMBER, Ty::STRING]); + let negated = combined.apply_negated(input, &mut store); + // Should be union of (String, Union(Number, String)) = Union(Number, String) + assert!( + matches!(store.get(negated), TyData::Union(_)), + "Expected Union type, got {:?}", + store.get(negated) + ); + } + + #[test] + fn test_total_fact_negation_removes_type() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::number(Totality::Total); + let input = store.union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + let result = fact.apply_negated(input, &mut store); + // Number should be removed from the union + let expected = store.union(vec![Ty::STRING, Ty::BOOL]); + assert_eq!(result, expected); + } + + #[rstest] + 
#[case::literal_bool_true_narrows_any(Fact::literal_bool(true), Ty::ANY, Ty::TRUE)] + #[case::literal_bool_false_narrows_any(Fact::literal_bool(false), Ty::ANY, Ty::FALSE)] + #[case::literal_bool_true_narrows_bool(Fact::literal_bool(true), Ty::BOOL, Ty::TRUE)] + fn test_literal_fact_apply_to_primitives( + #[case] fact: Fact, + #[case] input: Ty, + #[case] expected: Ty, + ) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + assert_eq!(fact.apply_to(input, &mut store), expected); + } + + #[test] + fn test_literal_string_fact_narrows_any() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::literal_string("hello".to_string()); + let result = fact.apply_to(Ty::ANY, &mut store); + let expected = store.literal_string("hello".to_string()); + assert_eq!(result, expected); + } + + #[test] + fn test_literal_string_fact_narrows_string() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::literal_string("hello".to_string()); + let result = fact.apply_to(Ty::STRING, &mut store); + let expected = store.literal_string("hello".to_string()); + assert_eq!(result, expected); + } + + #[rstest] + #[case::literal_bool_true_negated_becomes_false(Fact::literal_bool(true), Ty::BOOL, Ty::FALSE)] + #[case::literal_bool_false_negated_becomes_true(Fact::literal_bool(false), Ty::BOOL, Ty::TRUE)] + fn test_literal_bool_fact_apply_negated( + #[case] fact: Fact, + #[case] input: Ty, + #[case] expected: Ty, + ) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + assert_eq!(fact.apply_negated(input, &mut store), expected); + } + + #[test] + fn test_extract_literal_string_equality_fact() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#"x == "hello""#); + let fact = facts.get("x").expect("should have fact for x"); + let result = fact.apply_to(Ty::ANY, &mut store); + let expected = 
store.literal_string("hello".to_string()); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_literal_bool_equality_fact_true() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("x == true"); + let fact = facts.get("x").expect("should have fact for x"); + assert_eq!(fact.apply_to(Ty::ANY, &mut store), Ty::TRUE); + } + + #[test] + fn test_extract_literal_bool_equality_fact_false() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("x == false"); + let fact = facts.get("x").expect("should have fact for x"); + assert_eq!(fact.apply_to(Ty::ANY, &mut store), Ty::FALSE); + } + + #[test] + fn test_extract_literal_string_reversed() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + // "hello" == x should also work + let facts = parse_and_extract(r#""hello" == x"#); + let fact = facts.get("x").expect("should have fact for x"); + let result = fact.apply_to(Ty::ANY, &mut store); + let expected = store.literal_string("hello".to_string()); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_literal_bool_not_equal() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + // x != true should narrow x to false in the then-branch via negation + let facts = parse_and_extract("x != true"); + let fact = facts.get("x").expect("should have fact for x"); + // The fact is negated (`!literal_bool(true)`). + // When applied to Bool, it should give LiteralBool(false) + assert_eq!(fact.apply_to(Ty::BOOL, &mut store), Ty::FALSE); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/flow/mod.rs b/crates/jrsonnet-lsp-inference/src/flow/mod.rs new file mode 100644 index 00000000..2c664be3 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/flow/mod.rs @@ -0,0 +1,721 @@ +//! Flow typing: extracting and applying type facts from conditions. +//! +//! 
This module implements flow-sensitive type narrowing based on +//! conditions. + +mod extract; + +pub use extract::{extract_array_predicate_fact, extract_facts}; +use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, FunctionData, MutStore, ObjectData, ReturnSpec, Ty, TyData, +}; +use rustc_hash::FxHashMap; + +/// Totality indicates whether a fact can be negated. +/// +/// - `Total`: The fact fully classifies the value. For example, `std.isNumber(x)` +/// is total because if it's false, we know `x` is definitely NOT a number. +/// - `Partial`: The fact only partially classifies the value. For example, +/// `std.isInteger(x)` is partial because if it's false, `x` might still be +/// a decimal number. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Totality { + /// Fact can be negated - if false, the negation applies. + Total, + /// Fact cannot be negated - if false, we learn nothing. + Partial, +} + +/// A type fact about a single variable. +/// +/// Facts represent what we know about a variable's type based on a condition. +/// They can be combined using logical operators and applied to narrow types. +#[derive(Debug, Clone, PartialEq)] +pub struct Fact { + repr: FactRepr, +} + +/// Internal representation of a fact. +#[derive(Debug, Clone, PartialEq)] +enum FactRepr { + /// Narrows to a primitive type (number, string, bool, null, array, object, function). + Prim(PrimFact, Totality), + /// Object has a field with optional type constraint. + HasField { + field: String, + /// Optional constraint on the field's type. + field_type: Option<Box<Fact>>, + }, + /// Value has a specific length. + /// For arrays: converts to tuple with that many elements. + /// For objects: closes the object if field count matches. + HasLen(usize), + /// Value has at least this length (non-empty check). + /// Useful for `std.length(x) > 0` patterns. + MinLen(usize), + /// Array elements are all of a specific type. + /// Used for patterns like `std.all(std.map(std.isNumber, arr))`. 
+ ArrayElemType(PrimFact, Totality), + /// Value equals a literal boolean (true or false). + /// Used for `x == true` or `x == false` patterns. + LiteralBool(bool), + /// Value equals a literal string. + /// Used for `x == "literal"` patterns. + LiteralString(String), + /// Logical AND of two facts. + And(Box<FactRepr>, Box<FactRepr>), + /// Logical OR of two facts. + Or(Box<FactRepr>, Box<FactRepr>), + /// Logical NOT of a fact. + Not(Box<FactRepr>), +} + +/// Primitive type facts. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum PrimFact { + Null, + Bool, + Number, + String, + Array, + Object, + Function, +} + +impl PrimFact { + /// Convert to an interned Ty. + fn as_ty(self, store: &mut MutStore) -> Ty { + match self { + PrimFact::Null => Ty::NULL, + PrimFact::Bool => Ty::BOOL, + PrimFact::Number => Ty::NUMBER, + PrimFact::String => Ty::STRING, + PrimFact::Array => store.array(Ty::ANY), + PrimFact::Object => store.object(ObjectData::open()), + PrimFact::Function => store.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }), + } + } + + /// Approximate the complement of this primitive type when starting from `any`. 
+ fn negated_any_ty(self, store: &mut MutStore) -> Ty { + let array_any = store.array(Ty::ANY); + let object_any = store.object(ObjectData::open()); + let function_any = store.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }); + match self { + PrimFact::Null => store.union(vec![ + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::Bool => store.union(vec![ + Ty::NULL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::Number => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::String => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + array_any, + object_any, + function_any, + ]), + PrimFact::Array => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + object_any, + function_any, + ]), + PrimFact::Object => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + function_any, + ]), + PrimFact::Function => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + ]), + } + } +} + +impl Fact { + /// Create a null fact. + #[must_use] + pub fn null() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Null, Totality::Total), + } + } + + /// Create a number fact with given totality. + #[must_use] + pub fn number(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Number, totality), + } + } + + /// Create a string fact with given totality. + #[must_use] + pub fn string(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::String, totality), + } + } + + /// Create an array fact with given totality. + #[must_use] + pub fn array(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Array, totality), + } + } + + /// Create an object fact with given totality. 
+ #[must_use] + pub fn object(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Object, totality), + } + } + + /// Create a function fact. + #[must_use] + pub fn function() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Function, Totality::Total), + } + } + + /// Create a boolean fact. + #[must_use] + pub fn boolean() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Bool, Totality::Total), + } + } + + /// Create a fact that an object has a field. + #[must_use] + pub fn has_field(field: String) -> Self { + Self { + repr: FactRepr::HasField { + field, + field_type: None, + }, + } + } + + /// Create a fact that an object has a field with a specific type. + #[must_use] + pub fn has_field_typed(field: String, field_fact: Fact) -> Self { + Self { + repr: FactRepr::HasField { + field, + field_type: Some(Box::new(field_fact)), + }, + } + } + + /// Create a fact that a value has a specific length. + /// Applies to arrays, strings, and objects. + #[must_use] + pub fn has_len(len: usize) -> Self { + Self { + repr: FactRepr::HasLen(len), + } + } + + /// Create a fact that a value has at least a minimum length. + /// Useful for non-empty checks like `std.length(x) > 0`. + #[must_use] + pub fn min_len(min: usize) -> Self { + Self { + repr: FactRepr::MinLen(min), + } + } + + /// Create a fact that an array's elements are all of a specific type. + /// Used for higher-order predicates like `std.all(std.map(std.isNumber, arr))`. 
+ fn array_elem_number(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Number, totality), + } + } + + fn array_elem_string(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::String, totality), + } + } + + fn array_elem_bool(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Bool, totality), + } + } + + fn array_elem_array(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Array, totality), + } + } + + fn array_elem_object(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Object, totality), + } + } + + fn array_elem_function(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Function, totality), + } + } + + /// Create a fact that a value equals a specific boolean literal. + /// Used for `x == true` or `x == false` patterns. + #[must_use] + pub fn literal_bool(value: bool) -> Self { + Self { + repr: FactRepr::LiteralBool(value), + } + } + + /// Create a fact that a value equals a specific string literal. + /// Used for `x == "literal"` patterns. + #[must_use] + pub fn literal_string(value: String) -> Self { + Self { + repr: FactRepr::LiteralString(value), + } + } + + /// Logical AND of two facts. + #[must_use] + pub fn and(self, other: Self) -> Self { + Self { + repr: FactRepr::And(Box::new(self.repr), Box::new(other.repr)), + } + } + + /// Logical OR of two facts. + #[must_use] + pub fn or(self, other: Self) -> Self { + Self { + repr: FactRepr::Or(Box::new(self.repr), Box::new(other.repr)), + } + } + + /// Return the logical negation of this fact. + #[must_use] + pub fn negated(self) -> Self { + Self { + repr: FactRepr::Not(Box::new(self.repr)), + } + } + + /// Apply this fact to narrow a type. + /// + /// Returns the narrowed type when the fact is known to be true. 
+ pub fn apply_to(&self, ty: Ty, store: &mut MutStore) -> Ty { + self.repr.apply_to(ty, store) + } + + /// Apply the negation of this fact to narrow a type. + /// + /// Returns the narrowed type when the fact is known to be false. + pub fn apply_negated(&self, ty: Ty, store: &mut MutStore) -> Ty { + self.repr.apply_negated(ty, store) + } +} + +impl std::ops::Not for Fact { + type Output = Self; + + fn not(self) -> Self::Output { + self.negated() + } +} + +impl FactRepr { + /// Apply this fact to narrow a type (when fact is true). + fn apply_to(&self, ty: Ty, store: &mut MutStore) -> Ty { + match self { + FactRepr::Prim(prim, _) => { + let constraint = prim.as_ty(store); + store.narrow(ty, constraint) + } + + FactRepr::HasField { field, field_type } => { + apply_has_field_fact(ty, field, field_type.as_deref(), store) + } + + FactRepr::HasLen(len) => store.with_len(ty, *len), + + FactRepr::MinLen(min) => store.with_min_len(ty, *min), + + FactRepr::ArrayElemType(prim, _) => { + let prim_ty = prim.as_ty(store); + apply_array_elem_constraint(ty, prim_ty, store) + } + + FactRepr::LiteralBool(value) => { + // Narrow to the specific boolean literal type + let constraint = if *value { Ty::TRUE } else { Ty::FALSE }; + store.narrow(ty, constraint) + } + + FactRepr::LiteralString(value) => { + // Narrow to the specific string literal type + let constraint = store.literal_string(value.clone()); + store.narrow(ty, constraint) + } + + FactRepr::And(lhs, rhs) => { + // Apply both facts sequentially + let narrowed = lhs.apply_to(ty, store); + rhs.apply_to(narrowed, store) + } + + FactRepr::Or(lhs, rhs) => { + if let ( + FactRepr::ArrayElemType(lhs_prim, _), + FactRepr::ArrayElemType(rhs_prim, _), + ) = (&**lhs, &**rhs) + { + let lhs_ty = lhs_prim.as_ty(store); + let rhs_ty = rhs_prim.as_ty(store); + let union_ty = store.union(vec![lhs_ty, rhs_ty]); + return apply_array_elem_constraint(ty, union_ty, store); + } + // Apply each fact and union the results + // (a || b) means: either a 
is true OR b is true.
                // So the type is: (ty narrowed by a) | (ty narrowed by b).
                let lhs_ty = lhs.apply_to(ty, store);
                let rhs_ty = rhs.apply_to(ty, store);
                store.union(vec![lhs_ty, rhs_ty])
            }

            FactRepr::Not(inner) => {
                // A negated fact holds, so apply the inner fact's negation.
                inner.apply_negated(ty, store)
            }
        }
    }

    /// Apply the negation of this fact, i.e. narrow `ty` for the branch
    /// where the fact is known to be *false*.
    fn apply_negated(&self, ty: Ty, store: &mut MutStore) -> Ty {
        match self {
            FactRepr::Prim(prim, totality) => match totality {
                // A total primitive check negates precisely: the value is
                // definitely NOT of this primitive type, so remove it.
                Totality::Total => {
                    let remove = prim.as_ty(store);
                    store.widen(ty, remove)
                }
                // A partial check failing tells us nothing; type unchanged.
                Totality::Partial => ty,
            },

            FactRepr::HasField { field, field_type } => {
                // "field exists with this type" is false. Keep the field but
                // constrain it with the negation of the field fact.
                let negated_field_ty =
                    field_type.as_ref().map_or(Ty::NEVER, |inner| match &inner.repr {
                        FactRepr::Prim(prim, Totality::Total) => prim.negated_any_ty(store),
                        _ => inner.apply_negated(Ty::ANY, store),
                    });
                let constraint = store.object(ObjectData {
                    fields: vec![(
                        field.clone(),
                        FieldDefInterned {
                            ty: negated_field_ty,
                            required: true,
                            visibility: FieldVis::Normal,
                        },
                    )],
                    has_unknown: true,
                });
                store.narrow(ty, constraint)
            }

            // Negating "has length n" just means some other length; nothing
            // useful to narrow.
            FactRepr::HasLen(_) => ty,

            FactRepr::MinLen(min) => {
                // !(len >= n) means len < n. Only n == 1 is precise: the
                // value must be empty (length 0).
                if *min == 1 {
                    store.with_len(ty, 0)
                } else {
                    ty
                }
            }

            // !(all elements are T) only says SOME element is not T (total),
            // or nothing at all (partial); neither narrows usefully.
            FactRepr::ArrayElemType(_prim, _totality) => ty,

            FactRepr::LiteralBool(value) => {
                // !(x == true) narrows to false, and vice versa.
                // NOTE(review): this is only sound when x is already known to
                // be a bool; for a non-bool x this narrows toward NEVER —
                // confirm `narrow` semantics make that acceptable here.
                let constraint = if *value { Ty::FALSE } else { Ty::TRUE };
                store.narrow(ty, constraint)
            }

            FactRepr::LiteralString(value) => {
                // !(x == "lit"): remove the literal string type from ty.
                let remove = store.literal_string(value.clone());
                store.widen(ty, remove)
            }

            // De Morgan: !(a && b) == !a || !b — union of the two negations.
            FactRepr::And(lhs, rhs) => {
                let lhs_ty = lhs.apply_negated(ty, store);
                let rhs_ty = rhs.apply_negated(ty, store);
                store.union(vec![lhs_ty, rhs_ty])
            }

            // De Morgan: !(a || b) == !a && !b — apply both negations in turn.
            FactRepr::Or(lhs, rhs) => {
                let after_lhs = lhs.apply_negated(ty, store);
                rhs.apply_negated(after_lhs, store)
            }

            // Double negation: !!a == a.
            FactRepr::Not(inner) => inner.apply_to(ty, store),
        }
    }
}

/// Narrow the element type of an array-like `ty` with `elem_constraint`.
fn apply_array_elem_constraint(ty: Ty, elem_constraint: Ty, store: &mut MutStore) -> Ty {
    match store.get(ty) {
        // Arrays (and fully-unknown values) become arrays of the constraint.
        TyData::Array { .. } | TyData::Any => store.array(elem_constraint),
        TyData::Tuple { elems } => {
            // Narrow every tuple element, dropping elements that become NEVER.
            // NOTE(review): dropping elements changes tuple arity; confirm
            // this is the intended semantics rather than whole-tuple NEVER.
            let narrowed: Vec<_> = elems
                .iter()
                .map(|&elem| store.narrow(elem, elem_constraint))
                .filter(|&elem| elem != Ty::NEVER)
                .collect();
            if narrowed.is_empty() {
                Ty::NEVER
            } else {
                store.tuple(narrowed)
            }
        }
        TyData::Union(types) => {
            // Recurse into each variant and drop impossible ones.
            let narrowed: Vec<_> = types
                .iter()
                .map(|&variant| apply_array_elem_constraint(variant, elem_constraint, store))
                .filter(|&variant| variant != Ty::NEVER)
                .collect();
            store.union(narrowed)
        }
        // Non-array-like types are left untouched.
        _ => ty,
    }
}

/// Narrow `ty` so that `field` is known to exist, optionally further
/// constrained by `field_fact`.
fn apply_has_field_fact(
    ty: Ty,
    field: &str,
    field_fact: Option<&Fact>,
    store: &mut MutStore,
) -> Ty {
    // Type the field must have when it is not already present on the object.
    let required_field_ty = field_fact.map_or(Ty::ANY, |fact| fact.apply_to(Ty::ANY, store));

    match store.get(ty) {
        TyData::Object(mut obj_data) => {
            // Field already declared: narrow its type and mark it required.
            if let Some((_, existing_field)) =
                obj_data.fields.iter_mut().find(|(name, _)| name == field)
            {
                let next_field_ty = field_fact.map_or(existing_field.ty, |fact| {
                    fact.apply_to(existing_field.ty, store)
                });
                if next_field_ty == Ty::NEVER {
                    return Ty::NEVER;
                }
                existing_field.ty = next_field_ty;
                existing_field.required = true;
                return store.object(obj_data);
            }

            // Open object: the field may exist, so add it.
            if obj_data.has_unknown {
                obj_data.fields.push((
                    field.to_string(),
                    FieldDefInterned {
                        ty: required_field_ty,
                        required: true,
                        visibility: FieldVis::Normal,
                    },
                ));
                return store.object(obj_data);
            }

            // Closed object without the field: impossible.
            Ty::NEVER
        }
        TyData::Union(types) => {
            // Narrow each variant independently, dropping impossible ones.
            let narrowed: Vec<_> = types
                .iter()
                .map(|&variant| apply_has_field_fact(variant, field, field_fact, store))
                .filter(|&variant| variant != Ty::NEVER)
                .collect();
            store.union(narrowed)
        }
        // ANY becomes an open object carrying just this field.
        TyData::Any => store.object(ObjectData {
            fields: vec![(
                field.to_string(),
                FieldDefInterned {
                    ty: required_field_ty,
                    required: true,
                    visibility: FieldVis::Normal,
                },
            )],
            has_unknown: true,
        }),
        // Anything else: intersect with an open object carrying the field.
        _ => {
            let constraint = store.object(ObjectData {
                fields: vec![(
                    field.to_string(),
                    FieldDefInterned {
                        ty: required_field_ty,
                        required: true,
                        visibility: FieldVis::Normal,
                    },
                )],
                has_unknown: true,
            });
            store.narrow(ty, constraint)
        }
    }
}

/// A collection of facts about multiple variables.
#[derive(Debug, Clone, Default)]
pub struct Facts {
    /// Map from variable name to the (combined) fact known about it.
    facts: FxHashMap<String, Fact>,
}

impl Facts {
    /// Create an empty facts collection.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Add a fact for a variable.
    ///
    /// If a fact already exists for this variable, the two are `AND`ed.
    pub fn add(&mut self, var_name: String, fact: Fact) {
        let combined = match self.facts.remove(&var_name) {
            Some(existing) => existing.and(fact),
            None => fact,
        };
        self.facts.insert(var_name, combined);
    }

    /// Get the fact for a variable, if any.
    #[must_use]
    pub fn get(&self, var_name: &str) -> Option<&Fact> {
        self.facts.get(var_name)
    }

    /// Check if there are any facts.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.facts.is_empty()
    }

    /// Iterate over all (variable, fact) pairs.
    pub fn iter(&self) -> impl Iterator<Item = (&String, &Fact)> {
        self.facts.iter()
    }

    /// Combine two fact sets with OR (used for `||` conditions).
    ///
    /// Only variables present in BOTH sets keep a fact (OR-combined); a
    /// variable known on only one side teaches us nothing about the
    /// combined condition.
    #[must_use]
    pub fn or_combine(mut self, mut other: Self) -> Self {
        let mut result = Facts::new();
        for (var_name, fact) in self.facts.drain() {
            if let Some(other_fact) = other.facts.remove(&var_name) {
                result.facts.insert(var_name, fact.or(other_fact));
            }
        }
        result
    }

    /// Combine two fact sets with AND (merge), keeping facts from both.
    #[must_use]
    pub fn and_combine(mut self, other: Self) -> Self {
        for (var_name, fact) in other.facts {
            self.add(var_name, fact);
        }
        self
    }

    /// Negate every fact in this collection.
    #[must_use]
    pub fn negate(self) -> Self {
        let mut result = Facts::new();
        for (var_name, fact) in self.facts {
            result.facts.insert(var_name, !fact);
        }
        result
    }
}

// ---- crates/jrsonnet-lsp-inference/src/helpers.rs ----

//! Helper functions for type inference.

use jrsonnet_lsp_scope::expr_resolves_to_builtin_std;
use jrsonnet_lsp_types::{FieldVis, ParamInterned, Ty};
use jrsonnet_rowan_parser::{
    nodes::{ExprBase, FieldName, Visibility, VisibilityKind},
    AstToken,
};

use crate::{env::TypeEnv, expr::infer_expr_ty};

/// Convert AST visibility to interned field visibility.
+#[must_use] +pub fn convert_visibility_ty(vis: Option) -> FieldVis { + match vis.map(|v| v.kind()) { + Some(VisibilityKind::Coloncoloncolon) => FieldVis::ForceVisible, // ::: + Some(VisibilityKind::Coloncolon) => FieldVis::Hidden, // :: + Some(VisibilityKind::Colon) | None => FieldVis::Normal, // : (default) + } +} + +/// Extract field name from a `FieldName` node. +#[must_use] +pub fn extract_field_name(field_name: &FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + // Try id first, then text (for string keys) + if let Some(name) = fixed.id() { + if let Some(ident) = name.ident_lit() { + return Some(ident.text().to_string()); + } + } + // String key + if let Some(text) = fixed.text() { + // Return the text content without quotes + let s = text.syntax().text(); + // Remove quotes + if s.len() >= 2 { + return Some(s[1..s.len() - 1].to_string()); + } + } + None + } + FieldName::FieldNameDynamic(_) => { + // Dynamic field names can't be statically determined + None + } + } +} + +/// Infer type for stdlib field access (e.g., `std.map`), returning `Ty`. +/// +/// Returns the interned function type for stdlib functions when accessing them as fields. +pub fn infer_stdlib_field_access_ty( + field: &jrsonnet_rowan_parser::nodes::ExprField, + env: &mut TypeEnv, +) -> Option { + let base = field.base()?; + if !expr_resolves_to_builtin_std(&base) { + return None; + } + + // Get the field name + let field_name = field.field()?.ident_lit()?.text().to_string(); + + // Look up the stdlib function type and import it into the current store + if let Some(ty) = + jrsonnet_lsp_stdlib::import_stdlib_func_to_mut_store(env.store_mut(), &field_name) + { + return Some(ty); + } + + // Unknown stdlib function - return generic function type (interned) + Some(env.store_mut().function_any()) +} + +/// Extract a variable name from an expression if it's a simple variable reference. 
+/// +/// This is used for constraint tracking - we can only track constraints on +/// direct variable references, not complex expressions. +#[must_use] +pub fn extract_var_name_from_expr(expr: &jrsonnet_rowan_parser::nodes::Expr) -> Option { + let ExprBase::ExprVar(var) = expr.expr_base()? else { + return None; + }; + let name_node = var.name()?; + let ident = name_node.ident_lit()?; + Some(ident.text().to_string()) +} + +/// Extract parameter information (name and `has_default`) from a `ParamsDesc`. +/// +/// Returns a list of (name, `has_default`) pairs for each parameter. +#[must_use] +pub fn extract_params_with_defaults( + params_desc: &jrsonnet_rowan_parser::nodes::ParamsDesc, +) -> Vec<(String, bool)> { + params_desc + .params() + .filter_map(|param| { + let name = param.destruct().and_then(|d| match d { + jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) => full + .name() + .and_then(|n| n.ident_lit()) + .map(|i| i.text().to_string()), + _ => None, + })?; + // A parameter has a default if it has an = token or an expr + let has_default = param.assign_token().is_some() || param.expr().is_some(); + Some((name, has_default)) + }) + .collect() +} + +/// Extract parameters from a function definition as interned types. +/// Returns Vec of `ParamInterned` with name, type from default, and `has_default` flag. 
+pub fn extract_params_with_default_types_ty( + params_desc: &jrsonnet_rowan_parser::nodes::ParamsDesc, + env: &mut TypeEnv, +) -> Vec { + params_desc + .params() + .filter_map(|param| { + let name = param.destruct().and_then(|d| match d { + jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) => full + .name() + .and_then(|n| n.ident_lit()) + .map(|i| i.text().to_string()), + _ => None, + })?; + + let has_default = param.assign_token().is_some() || param.expr().is_some(); + + // Infer type from default value if present + let default_ty = param + .expr() + .map_or(Ty::ANY, |default_expr| infer_expr_ty(&default_expr, env)); + + Some(ParamInterned { + name, + ty: default_ty, + has_default, + }) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeSet; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_types::{ObjectData, TyData}; + + use super::*; + use crate::expr::infer_document_type_ty; + + fn infer_doc(code: &str) -> (Ty, TypeEnv) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + infer_document_type_ty(&doc) + } + + fn try_object(env: &TypeEnv, ty: Ty) -> Option { + match env.store().get(ty) { + TyData::Object(obj) => Some(obj), + _ => None, + } + } + + fn assert_fields_ty(obj: &ObjectData, expected: &[&str]) { + let actual: BTreeSet<_> = obj.fields.iter().map(|(name, _)| name.as_str()).collect(); + let expected: BTreeSet<_> = expected.iter().copied().collect(); + assert_eq!(actual, expected, "Field mismatch"); + } + + #[test] + fn test_string_key_field_extraction() { + // String keys like { "my-field": 1 } should be extracted correctly + let (ty, env) = infer_doc(r#"{ "my-field": 1, "another-key": 2 }"#); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["my-field", "another-key"]); + } + + #[test] + fn test_mixed_field_name_styles() { + // Mix of identifier and string key styles + let (ty, env) = infer_doc(r#"{ normal: 1, "string-key": 2 }"#); + let 
obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["normal", "string-key"]); + } + + #[test] + fn test_dynamic_field_not_in_static_fields() { + // Dynamic field names [expr] cannot be statically determined + // The object should still be created but without the dynamic field in static analysis + let (ty, env) = infer_doc(r#"{ static: 1, ["dyn" + "amic"]: 2 }"#); + let obj = try_object(&env, ty).expect("expected object"); + // Only the static field should be present + assert_fields_ty(&obj, &["static"]); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/lib.rs b/crates/jrsonnet-lsp-inference/src/lib.rs new file mode 100644 index 00000000..22b0ca63 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/lib.rs @@ -0,0 +1,50 @@ +//! Type inference and flow analysis for jrsonnet LSP. +//! +//! This crate provides type inference, flow-sensitive type narrowing, +//! and type caching for Jsonnet code analysis. +//! +//! # Modules +//! +//! - [`analysis`]: Type analysis storage and queries (`TypeAnalysis`) +//! - [`env`]: Type environment (`TypeEnv`) for tracking variable types +//! - [`expr`]: Expression type inference +//! - [`object`]: Object type inference +//! - [`helpers`]: Helper functions for inference +//! - [`poly`]: Polymorphic type instantiation +//! - [`flow`]: Flow-sensitive type narrowing +//! - [`const_eval`]: Constant expression evaluation +//! - [`manager`]: Document management with type analysis caching +//! - [`type_cache`]: Cross-file type caching +//! - [`provider`]: Type provider for cross-file analysis with dependency handling +//! 
//! - [`suggestions`]: String similarity for suggestions

pub mod analysis;
pub mod const_eval;
pub mod env;
pub mod expr;
pub mod flow;
pub mod helpers;
pub mod manager;
pub mod object;
pub mod poly;
pub mod provider;
pub mod semantic_artifacts;
pub mod suggestions;
pub mod type_cache;

// Re-export the public API at the crate root so consumers don't need to
// reach into individual modules.
pub use analysis::TypeAnalysis;
pub use const_eval::{trace_base, trace_expr, trace_ident, ConstEvalResult};
pub use env::{ImportResolver, ObjectContextInterned, TypeEnv, MAX_FUNCTION_INFERENCE_DEPTH};
pub use expr::{infer_document_type_ty, infer_expr_ty, infer_expr_ty_with_expected, is_divergent};
pub use flow::{extract_facts, Fact, Facts, Totality};
pub use helpers::extract_params_with_defaults;
pub use manager::{DocumentManager, SharedDocumentManager};
pub use poly::instantiate_function_call_ty;
pub use provider::{DocumentSource, TypeProvider};
pub use semantic_artifacts::{
    DefinitionBindingInfo, SemanticArtifacts, SemanticBindingKind, SemanticImportTarget,
    SemanticVisibleBinding,
};
pub use suggestions::find_best_match;
pub use type_cache::{analyze_and_cache, new_shared_cache, SharedTypeCache, TypeCache};

// ---- crates/jrsonnet-lsp-inference/src/manager.rs ----

//! Document manager for tracking open documents.
//!
//! Uses `DashMap` for concurrent access to documents without
//! requiring a global lock. Thread-safe for use with `TypeProvider`.
+ +use std::{num::NonZeroUsize, sync::Arc}; + +use dashmap::DashMap; +use jrsonnet_lsp_document::{ + CanonicalPath, DocVersion, Document, FileId, PathResolver, PathStore, + DEFAULT_ANALYSIS_CACHE_CAPACITY, DEFAULT_CLOSED_CACHE_CAPACITY, +}; +use jrsonnet_lsp_types::GlobalTyStore; +use lru::LruCache; +use moka::sync::Cache as MokaCache; +use parking_lot::RwLock; + +use crate::{ + analysis::TypeAnalysis, provider::DocumentSource, semantic_artifacts::SemanticArtifacts, +}; + +/// Cached type analysis for a document. +#[derive(Clone)] +struct CachedAnalysis { + /// Version of the document this analysis was computed for. + version: DocVersion, + /// The cached analysis (wrapped in Arc for sharing). + analysis: Arc, +} + +/// Cached semantic artifacts for a document. +#[derive(Clone)] +struct CachedSemanticArtifacts { + /// Version of the document this artifact set was computed for. + version: DocVersion, + /// The cached artifacts (wrapped in Arc for sharing). + artifacts: Arc, +} + +/// Document manager that tracks open documents and caches recently closed ones. +/// +/// This type is `Sync` and can be safely shared across threads. +pub struct DocumentManager { + /// Interned mapping between canonical paths and stable file ids. + paths: PathStore, + /// Read-only resolver over interned mapping. + resolver: PathResolver, + /// Open documents keyed by interned file ids. + open: DashMap, + /// LRU cache for recently closed documents. + closed: RwLock>, + /// Cached type analysis per document id (validated by version). + analysis_cache: MokaCache, + /// Cached semantic artifacts per document id (validated by version). + semantic_cache: MokaCache, + /// Global type store shared across all analyses. + global_types: Arc, +} + +impl DocumentManager { + /// Create a new document manager with a shared global type store. 
+ pub fn new(global_types: Arc, paths: PathStore) -> Self { + Self::with_capacity(global_types, DEFAULT_CLOSED_CACHE_CAPACITY, paths) + } + + /// Create a new document manager with specific capacities. + pub fn with_capacity( + global_types: Arc, + closed_capacity: usize, + paths: PathStore, + ) -> Self { + let closed_capacity = NonZeroUsize::new(closed_capacity).unwrap_or(NonZeroUsize::MIN); + let resolver = paths.resolver(); + Self { + paths, + resolver, + open: DashMap::new(), + closed: RwLock::new(LruCache::new(closed_capacity)), + analysis_cache: MokaCache::new(DEFAULT_ANALYSIS_CACHE_CAPACITY as u64), + semantic_cache: MokaCache::new(DEFAULT_ANALYSIS_CACHE_CAPACITY as u64), + global_types, + } + } + + /// Get a reference to the global type store. + pub fn global_types(&self) -> &Arc { + &self.global_types + } + + fn intern_path(&self, path: &CanonicalPath) -> FileId { + self.paths.intern(path) + } + + fn file_id(&self, path: &CanonicalPath) -> Option { + self.resolver.file(path) + } + + fn file_id_or_intern(&self, path: &CanonicalPath) -> FileId { + self.file_id(path).unwrap_or_else(|| self.intern_path(path)) + } + + /// Get or create the interned file id for `path`. + #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.file_id_or_intern(path) + } + + /// Resolve a file path to an interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.file_id(path) + } + + fn path_for_id(&self, file_id: FileId) -> Option { + self.resolver + .path(file_id) + .map(|path| path.as_ref().clone()) + } + + /// Borrow an interned file identifier's canonical path. + pub fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) + } + + fn invalidate_analysis_by_id(&self, file_id: FileId) { + self.analysis_cache.invalidate(&file_id); + } + + fn invalidate_semantic_artifacts_by_id(&self, file_id: FileId) { + self.semantic_cache.invalidate(&file_id); + } + + /// Invalidate analysis cache by file identifier. 
+ pub fn invalidate_analysis_file(&self, file: FileId) { + self.invalidate_analysis_by_id(file); + } + + /// Invalidate semantic artifacts cache by file identifier. + pub fn invalidate_semantic_artifacts_file(&self, file: FileId) { + self.invalidate_semantic_artifacts_by_id(file); + } + + /// Open a document (called on textDocument/didOpen). + pub fn open(&self, path: CanonicalPath, text: String, version: DocVersion) { + let file_id = self.file_id_or_intern(&path); + + // Move from closed cache if present + { + let mut closed = self.closed.write(); + closed.pop(&file_id); + } + + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); + + let document = Document::new(text, version); + self.open.insert(file_id, document); + } + + /// Update an open document with full text (called on textDocument/didChange with full sync). + /// + /// Returns true if the document was found and updated. + pub fn update(&self, path: &CanonicalPath, text: String, version: DocVersion) -> bool { + let Some(file_id) = self.file_id(path) else { + return false; + }; + + self.open.get_mut(&file_id).is_some_and(|mut doc| { + doc.update(text, version); + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); + true + }) + } + + /// Apply an incremental change to an open document. + /// + /// Returns true if the document was found and the change was applied successfully. + pub fn apply_incremental_change( + &self, + path: &CanonicalPath, + range: lsp_types::Range, + new_text: &str, + version: DocVersion, + ) -> bool { + let Some(file_id) = self.file_id(path) else { + return false; + }; + + self.open.get_mut(&file_id).is_some_and(|mut doc| { + if doc.apply_incremental_change(range, new_text, version) { + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); + true + } else { + false + } + }) + } + + /// Close a document (called on textDocument/didClose). 
+ /// + /// Moves the document to the closed cache for potential reuse. + pub fn close(&self, path: &CanonicalPath) { + let Some(file_id) = self.file_id(path) else { + return; + }; + + if let Some((file_id, document)) = self.open.remove(&file_id) { + let mut closed = self.closed.write(); + closed.put(file_id, document); + } + } + + /// Refresh a closed document from disk after external file changes. + /// + /// Returns `true` when the closed cache was updated. + pub fn refresh_closed_from_disk(&self, path: &CanonicalPath) -> bool { + if self.is_open(path) { + return false; + } + + let Ok(text) = std::fs::read_to_string(path.as_path()) else { + return false; + }; + + let file_id = self.file_id_or_intern(path); + self.closed + .write() + .put(file_id, Document::new(text, DocVersion::new(0))); + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); + true + } + + /// Remove a closed document entry from the cache. + pub fn remove_closed(&self, path: &CanonicalPath) { + let Some(file_id) = self.file_id(path) else { + return; + }; + self.closed.write().pop(&file_id); + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); + } + + /// Get an open document. + pub fn get(&self, path: &CanonicalPath) -> Option { + let file_id = self.file_id(path)?; + self.open.get(&file_id).map(|doc| doc.clone()) + } + + /// Get the document text for a path. + /// + /// Checks open documents first, then the closed cache, + /// and finally tries to read from disk. 
+ pub fn get_text(&self, path: &CanonicalPath) -> Option { + if let Some(file_id) = self.file_id(path) { + // Check open documents + if let Some(doc) = self.open.get(&file_id) { + return Some(doc.text().to_string()); + } + + // Check closed cache (use peek to avoid write lock when just reading) + { + let closed = self.closed.read(); + if let Some(doc) = closed.peek(&file_id) { + return Some(doc.text().to_string()); + } + } + } + + // Try to read from disk + std::fs::read_to_string(path.as_path()).ok() + } + + /// Get a document from any source (open, closed cache, or disk). + /// + /// Returns a cloned Document which is cheap due to internal Arc usage. + pub fn get_document(&self, path: &CanonicalPath) -> Option { + if let Some(file_id) = self.file_id(path) { + if let Some(doc) = self.get_document_file(file_id) { + return Some(doc); + } + } + + // Read from disk once, then cache in `closed` for reuse. + let text = std::fs::read_to_string(path.as_path()).ok()?; + let document = Document::new(text, DocVersion::new(0)); + let file_id = self.file_id_or_intern(path); + { + let mut closed = self.closed.write(); + closed.put(file_id, document.clone()); + } + Some(document) + } + + /// Get a document by interned file id from open docs, closed cache, or disk. + /// + /// Returns a cloned Document which is cheap due to internal Arc usage. + pub fn get_document_file(&self, file: FileId) -> Option { + // Check open documents. + if let Some(doc) = self.open.get(&file) { + return Some(doc.clone()); + } + + // Check closed cache. + { + let closed = self.closed.read(); + if let Some(doc) = closed.peek(&file) { + return Some(doc.clone()); + } + } + + // Read from disk once, then cache in `closed` for reuse. 
+ let path = self.path(file)?; + let text = std::fs::read_to_string(path.as_ref().as_path()).ok()?; + let document = Document::new(text, DocVersion::new(0)); + { + let mut closed = self.closed.write(); + closed.put(file, document.clone()); + } + Some(document) + } + + /// Check if a document is currently open. + pub fn is_open(&self, path: &CanonicalPath) -> bool { + self.file_id(path) + .is_some_and(|file_id| self.open.contains_key(&file_id)) + } + + /// Check if a file is currently open. + pub fn is_open_file(&self, file: FileId) -> bool { + self.open.contains_key(&file) + } + + /// Get the number of open documents. + pub fn open_count(&self) -> usize { + self.open.len() + } + + /// Get all open files. + pub fn open_files(&self) -> Vec { + self.open.iter().map(|entry| *entry.key()).collect() + } + + /// Iterate over all open documents. + pub fn for_each_open(&self, mut f: F) + where + F: FnMut(&CanonicalPath, &Document), + { + for entry in &self.open { + if let Some(path) = self.path_for_id(*entry.key()) { + f(&path, entry.value()); + } + } + } + + /// Get all open document paths. + pub fn open_paths(&self) -> Vec { + self.open_files() + .into_iter() + .filter_map(|file| self.path_for_id(file)) + .collect() + } + + /// Iterate over all open documents. + pub fn iter(&self) -> std::vec::IntoIter<(CanonicalPath, Document)> { + self.open + .iter() + .filter_map(|entry| { + self.path_for_id(*entry.key()) + .map(|path| (path, entry.value().clone())) + }) + .collect::>() + .into_iter() + } + + /// Parallel iterate over all open documents. + pub fn par_iter(&self) -> rayon::vec::IntoIter<(CanonicalPath, Document)> { + use rayon::prelude::*; + self.iter().collect::>().into_par_iter() + } + + /// Get cached type analysis for a document, computing it if needed. + /// + /// Returns `None` if the document is not found. + /// The analysis is cached and reused until the document version changes. 
+ pub fn get_analysis(&self, path: &CanonicalPath) -> Option> { + // Get the document first + let doc = self.get_document(path)?; + let version = doc.version(); + + Some(self.get_or_compute_analysis(path, version, || { + TypeAnalysis::analyze_with_global(&doc, Arc::clone(&self.global_types)) + })) + } + + /// Get cached analysis for `path@version`, or compute and cache it. + pub fn get_or_compute_analysis( + &self, + path: &CanonicalPath, + version: DocVersion, + compute: F, + ) -> Arc + where + F: FnOnce() -> TypeAnalysis, + { + let file_id = self.file_id_or_intern(path); + + if let Some(cached) = self.analysis_cache.get(&file_id) { + if cached.version == version { + return Arc::clone(&cached.analysis); + } + } + + let analysis = Arc::new(compute()); + self.analysis_cache.insert( + file_id, + CachedAnalysis { + version, + analysis: Arc::clone(&analysis), + }, + ); + analysis + } + + /// Insert a precomputed analysis for `path@version` into the cache. + /// + /// This is useful when background workers (for example diagnostics) already + /// computed analysis and want subsequent requests to reuse it. + pub fn cache_analysis( + &self, + path: CanonicalPath, + version: DocVersion, + analysis: Arc, + ) { + let file_id = self.file_id_or_intern(&path); + self.analysis_cache + .insert(file_id, CachedAnalysis { version, analysis }); + } + + /// Insert a precomputed analysis for `file@version` into the cache. + pub fn cache_analysis_file( + &self, + file: FileId, + version: DocVersion, + analysis: Arc, + ) { + self.analysis_cache + .insert(file, CachedAnalysis { version, analysis }); + } + + /// Get cached semantic artifacts for a document, computing them if needed. + /// + /// Returns `None` if the document is not found. 
+ pub fn get_semantic_artifacts(&self, path: &CanonicalPath) -> Option> { + let doc = self.get_document(path)?; + let version = doc.version(); + Some( + self.get_or_compute_semantic_artifacts(path, version, || { + SemanticArtifacts::build(&doc) + }), + ) + } + + /// Get cached semantic artifacts for `path@version`, or compute and cache them. + pub fn get_or_compute_semantic_artifacts( + &self, + path: &CanonicalPath, + version: DocVersion, + compute: F, + ) -> Arc + where + F: FnOnce() -> SemanticArtifacts, + { + let file_id = self.file_id_or_intern(path); + + if let Some(cached) = self.semantic_cache.get(&file_id) { + if cached.version == version { + return Arc::clone(&cached.artifacts); + } + } + + let artifacts = Arc::new(compute()); + self.semantic_cache.insert( + file_id, + CachedSemanticArtifacts { + version, + artifacts: Arc::clone(&artifacts), + }, + ); + artifacts + } + + /// Insert precomputed semantic artifacts for `path@version` into the cache. + pub fn cache_semantic_artifacts( + &self, + path: CanonicalPath, + version: DocVersion, + artifacts: Arc, + ) { + let file_id = self.file_id_or_intern(&path); + self.semantic_cache + .insert(file_id, CachedSemanticArtifacts { version, artifacts }); + } + + /// Eagerly refresh semantic artifacts for a tracked document. + /// + /// Returns `true` when artifacts were refreshed. + pub fn refresh_semantic_artifacts(&self, path: &CanonicalPath) -> bool { + let Some(doc) = self.get_document(path) else { + return false; + }; + let version = doc.version(); + let artifacts = Arc::new(SemanticArtifacts::build(&doc)); + self.cache_semantic_artifacts(path.clone(), version, artifacts); + true + } + + /// Invalidate the analysis cache for a document. + /// + /// Call this when a document's content changes to ensure the next + /// `get_analysis` call recomputes the analysis. 
+ pub fn invalidate_analysis(&self, path: &CanonicalPath) { + if let Some(file_id) = self.file_id(path) { + self.invalidate_analysis_by_id(file_id); + } + } + + /// Invalidate the semantic artifact cache for a document. + pub fn invalidate_semantic_artifacts(&self, path: &CanonicalPath) { + if let Some(file_id) = self.file_id(path) { + self.invalidate_semantic_artifacts_by_id(file_id); + } + } +} + +impl DocumentSource for DocumentManager { + fn get_document_file(&self, file: FileId) -> Option { + DocumentManager::get_document_file(self, file) + } +} + +impl IntoIterator for &DocumentManager { + type Item = (CanonicalPath, Document); + type IntoIter = std::vec::IntoIter<(CanonicalPath, Document)>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +/// Shared document manager wrapped in thread-safe reference-counted pointer. +pub type SharedDocumentManager = Arc; + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::*; + + fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(PathBuf::from(format!("/test/{name}.jsonnet"))) + } + + fn test_global_store() -> Arc { + Arc::new(GlobalTyStore::new()) + } + + #[test] + fn test_open_and_get() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("test"); + + manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); + + let doc = manager.get(&path).expect("document should exist"); + assert_eq!(doc.text(), "{ a: 1 }"); + assert_eq!(doc.version(), DocVersion::new(1)); + } + + #[test] + fn test_open_and_get_with_equivalent_path() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("test"); + + manager.open(path, "{ a: 1 }".to_string(), DocVersion::new(1)); + + let lookup = test_path("test"); + let doc = manager.get(&lookup).expect("document should exist"); + assert_eq!(doc.text(), "{ a: 1 }"); + assert_eq!(doc.version(), 
DocVersion::new(1)); + } + + #[test] + fn test_update() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("test"); + + manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); + assert!(manager.update(&path, "{ a: 2 }".to_string(), DocVersion::new(2))); + + let doc = manager.get(&path).expect("document should exist"); + assert_eq!(doc.text(), "{ a: 2 }"); + assert_eq!(doc.version(), DocVersion::new(2)); + } + + #[test] + fn test_close_moves_to_cache() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("test"); + + manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); + manager.close(&path); + + assert!(!manager.is_open(&path)); + // But the text should still be available from cache. + assert_eq!(manager.get_text(&path), Some("{ a: 1 }".to_string())); + } + + #[test] + fn test_reopen_clears_from_cache() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("test"); + + manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); + manager.close(&path); + + // Reopen with new content. 
+ manager.open(path.clone(), "{ a: 2 }".to_string(), DocVersion::new(2)); + + let doc = manager.get(&path).expect("document should exist"); + assert_eq!(doc.text(), "{ a: 2 }"); + } + + #[test] + fn test_missing_path_operations_are_noop() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("missing"); + + assert_eq!(manager.get(&path).map(|doc| doc.text().to_string()), None); + assert!(!manager.update(&path, "{}".to_string(), DocVersion::new(1))); + assert!(!manager.apply_incremental_change( + &path, + lsp_types::Range { + start: lsp_types::Position::new(0, 0), + end: lsp_types::Position::new(0, 0), + }, + "{}", + DocVersion::new(1), + )); + } + + #[test] + fn test_multiple_documents() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + + // Open multiple documents sequentially. + for i in 0..10 { + let path = test_path(&format!("test{i}")); + manager.open(path.clone(), format!("{{ a: {i} }}"), DocVersion::new(i)); + assert!(manager.is_open(&path)); + } + + assert_eq!(manager.open_count(), 10); + } + + #[test] + fn test_analysis_caching() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("test"); + + manager.open( + path.clone(), + "local x = 1; x".to_string(), + DocVersion::new(1), + ); + + // First call computes analysis. + let analysis1 = manager.get_analysis(&path).expect("analysis should exist"); + + // Second call returns cached (same Arc pointer). 
+ let analysis2 = manager.get_analysis(&path).expect("analysis should exist"); + assert!( + Arc::ptr_eq(&analysis1, &analysis2), + "should return cached analysis" + ); + } + + #[test] + fn test_analysis_cache_invalidation() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("test"); + + manager.open( + path.clone(), + "local x = 1; x".to_string(), + DocVersion::new(1), + ); + + let analysis1 = manager.get_analysis(&path).expect("analysis should exist"); + + // Invalidate the cache. + manager.invalidate_analysis(&path); + + // Next call recomputes (different Arc pointer). + let analysis2 = manager.get_analysis(&path).expect("analysis should exist"); + assert!( + !Arc::ptr_eq(&analysis1, &analysis2), + "should recompute after invalidation" + ); + } + + #[test] + fn test_analysis_cache_version_mismatch() { + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); + let path = test_path("test"); + + manager.open( + path.clone(), + "local x = 1; x".to_string(), + DocVersion::new(1), + ); + + let analysis1 = manager.get_analysis(&path).expect("analysis should exist"); + + // Update the document (changes version). + manager.update(&path, "local y = 2; y".to_string(), DocVersion::new(2)); + + // Next call recomputes due to version mismatch. + let analysis2 = manager.get_analysis(&path).expect("analysis should exist"); + assert!( + !Arc::ptr_eq(&analysis1, &analysis2), + "should recompute after version change" + ); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/object.rs b/crates/jrsonnet-lsp-inference/src/object.rs new file mode 100644 index 00000000..6e4c0f01 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -0,0 +1,485 @@ +//! Object type inference for Jsonnet expressions. 
+ +use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, +}; +use jrsonnet_rowan_parser::nodes::{Bind, Expr, FieldName, Member, ObjBody}; +use rustc_hash::FxHashMap; + +use crate::{ + env::TypeEnv, + expr::bind_destruct_with_type_ty, + helpers::{convert_visibility_ty, extract_field_name, extract_params_with_default_types_ty}, +}; + +/// Infer the type of an object body, returning interned `Ty`. +pub fn infer_object_type_ty( + body: Option<&ObjBody>, + env: &mut TypeEnv, + infer_expr: &mut impl FnMut(&Expr, &mut TypeEnv) -> Ty, +) -> Ty { + infer_object_type_with_super_ty(body, env, None, infer_expr) +} + +/// Infer the type of an object body with an optional super type, returning interned `Ty`. +/// +/// The `super_type` is used for object extension expressions like `base { ... }`. +pub fn infer_object_type_with_super_ty( + body: Option<&ObjBody>, + env: &mut TypeEnv, + super_type: Option, + infer_expr: &mut impl FnMut(&Expr, &mut TypeEnv) -> Ty, +) -> Ty { + let Some(body) = body else { + return env.store_mut().object(ObjectData::empty()); + }; + + match body { + ObjBody::ObjBodyMemberList(members) => { + // Pass 1: Collect all field names with preliminary types + // This creates a "skeleton" of the object for self references + let mut preliminary_fields: Vec<(String, FieldVis)> = Vec::new(); + let mut preliminary_has_unknown = false; + + for member in members.members() { + match &member { + Member::MemberFieldNormal(field) => { + if let Some(field_name) = field.field_name() { + if let Some(name_str) = extract_field_name(&field_name) { + let visibility = convert_visibility_ty(field.visibility()); + preliminary_fields.push((name_str, visibility)); + } else { + preliminary_has_unknown = true; + } + } + } + Member::MemberFieldMethod(method) => { + if let Some(field_name) = method.field_name() { + if let Some(name_str) = extract_field_name(&field_name) { + let visibility = 
convert_visibility_ty(method.visibility()); + preliminary_fields.push((name_str, visibility)); + } else { + preliminary_has_unknown = true; + } + } + } + _ => {} + } + } + + // Build preliminary object data (all fields have Any type initially) + let mut preliminary_obj_fields: Vec<(String, FieldDefInterned)> = preliminary_fields + .iter() + .map(|(name, vis)| { + ( + name.clone(), + FieldDefInterned { + ty: Ty::ANY, + required: true, + visibility: *vis, + }, + ) + }) + .collect(); + + // If we have a super type, merge its fields into the preliminary type + if let Some(super_ty) = super_type { + if let TyData::Object(super_obj) = env.store_mut().get(super_ty) { + for (name, field_def) in &super_obj.fields { + if !preliminary_obj_fields.iter().any(|(n, _)| n == name) { + preliminary_obj_fields.push((name.clone(), field_def.clone())); + } + } + preliminary_has_unknown |= super_obj.has_unknown; + } + } + + // Sort for canonical form + preliminary_obj_fields.sort_by(|(a, _), (b, _)| a.cmp(b)); + + let preliminary_obj = ObjectData { + fields: preliminary_obj_fields, + has_unknown: preliminary_has_unknown, + }; + let preliminary_ty = env.store_mut().object(preliminary_obj); + + // Object-local bindings are scoped to this object body. + env.push_scope(); + + // Push object context for self references + env.push_object_context_ty(preliminary_ty, super_type); + + // Object-local bindings (`local x = ...`) participate in field inference. 
+ for member in members.members() { + let Member::MemberBindStmt(bind_stmt) = member else { + continue; + }; + let Some(bind) = bind_stmt.obj_local().and_then(|obj_local| obj_local.bind()) + else { + continue; + }; + infer_object_local_bind_ty(&bind, env, infer_expr); + } + + // Pass 2: Infer actual field types with self available + let mut final_fields: Vec<(String, FieldDefInterned)> = Vec::new(); + let mut final_has_unknown = preliminary_has_unknown; + + for member in members.members() { + match member { + Member::MemberFieldNormal(field) => { + if let Some(field_name) = field.field_name() { + if let FieldName::FieldNameDynamic(dynamic) = &field_name { + final_has_unknown = true; + if let Some(expr) = dynamic.expr() { + let _ = infer_expr(&expr, env); + } + } + if let Some(name_str) = extract_field_name(&field_name) { + let field_ty = + field.expr().map_or(Ty::ANY, |e| infer_expr(&e, env)); + let visibility = convert_visibility_ty(field.visibility()); + final_fields.push(( + name_str, + FieldDefInterned { + ty: field_ty, + required: true, + visibility, + }, + )); + } + } + } + Member::MemberFieldMethod(method) => { + if let Some(field_name) = method.field_name() { + if let FieldName::FieldNameDynamic(dynamic) = &field_name { + final_has_unknown = true; + if let Some(expr) = dynamic.expr() { + let _ = infer_expr(&expr, env); + } + } + if let Some(name_str) = extract_field_name(&field_name) { + let params = method + .params_desc() + .map(|p| extract_params_with_default_types_ty(&p, env)) + .unwrap_or_default(); + let (return_ty, param_constraints) = + if env.can_infer_function_body() { + method.expr().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = + params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr(&body, env); + env.exit_function(); + let 
constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + }, + ) + } else { + (Ty::ANY, FxHashMap::default()) + }; + + let final_params = params + .into_iter() + .map(|param| { + let mut param_ty = param.ty; + if let Some(constraints) = + param_constraints.get(¶m.name) + { + for constraint_ty in constraints { + param_ty = env + .store_mut() + .narrow(param_ty, *constraint_ty); + } + } + jrsonnet_lsp_types::ParamInterned { + name: param.name, + ty: param_ty, + has_default: param.has_default, + } + }) + .collect(); + let visibility = convert_visibility_ty(method.visibility()); + + let func_ty = env.store_mut().function(FunctionData { + params: final_params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }); + + final_fields.push(( + name_str, + FieldDefInterned { + ty: func_ty, + required: true, + visibility, + }, + )); + } + } + } + _ => {} + } + } + + // Pop object context + env.pop_object_context(); + env.pop_scope(); + + // Sort for canonical form + final_fields.sort_by(|(a, _), (b, _)| a.cmp(b)); + + env.store_mut().object(ObjectData { + fields: final_fields, + has_unknown: final_has_unknown, + }) + } + ObjBody::ObjBodyComp(_) => { + // Object comprehension has unknown fields + env.store_mut().object(ObjectData::open()) + } + } +} + +fn infer_object_local_bind_ty( + bind: &Bind, + env: &mut TypeEnv, + infer_expr: &mut impl FnMut(&Expr, &mut TypeEnv) -> Ty, +) { + match bind { + Bind::BindDestruct(bind_destruct) => { + let Some(destruct) = bind_destruct.into() else { + return; + }; + let ty = bind_destruct + .value() + .map_or(Ty::ANY, |value| infer_expr(&value, env)); + bind_destruct_with_type_ty(&destruct, ty, env); + } + Bind::BindFunction(bind_function) => { + let Some(name_node) = bind_function.name() else { + return; + }; + let Some(ident) = name_node.ident_lit() else { + return; + }; + let name = ident.text().to_string(); + let params = bind_function + .params() + .map(|params| 
extract_params_with_default_types_ty(¶ms, env)) + .unwrap_or_default(); + + // Install a provisional function so recursive calls can resolve. + let provisional_ty = env.store_mut().function(FunctionData { + params: params.clone(), + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }); + env.define_ty(name.clone(), provisional_ty); + + let (return_ty, param_constraints) = if env.can_infer_function_body() { + bind_function.value().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = + params.iter().map(|param| param.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr(&body, env); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + }, + ) + } else { + (Ty::ANY, FxHashMap::default()) + }; + + let final_params: Vec = params + .into_iter() + .map(|param| { + let mut narrowed_ty = param.ty; + if let Some(constraints) = param_constraints.get(¶m.name) { + for constraint_ty in constraints { + narrowed_ty = env.store_mut().narrow(narrowed_ty, *constraint_ty); + } + } + ParamInterned { + name: param.name, + ty: narrowed_ty, + has_default: param.has_default, + } + }) + .collect(); + + let final_ty = env.store_mut().function(FunctionData { + params: final_params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }); + env.define_ty(name, final_ty); + } + } +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeSet; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_types::{FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyData}; + + use super::*; + use crate::expr::infer_document_type_ty; + + fn infer_doc(code: &str) -> (Ty, TypeEnv) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + infer_document_type_ty(&doc) + } + + fn 
try_object(env: &TypeEnv, ty: Ty) -> Option { + match env.store().get(ty) { + TyData::Object(obj) => Some(obj), + _ => None, + } + } + + fn assert_fields_ty(obj: &ObjectData, expected: &[&str]) { + let actual: BTreeSet<_> = obj.fields.iter().map(|(name, _)| name.as_str()).collect(); + let expected: BTreeSet<_> = expected.iter().copied().collect(); + assert_eq!(actual, expected, "Field mismatch"); + } + + fn get_field_ty<'a>(obj: &'a ObjectData, name: &str) -> Option<&'a FieldDefInterned> { + obj.fields + .iter() + .find(|(n, _)| n == name) + .map(|(_, def)| def) + } + + #[test] + fn test_object_comprehension_produces_open_object() { + // Object comprehensions have dynamic keys, so they produce open objects + let (ty, env) = infer_doc("{ [k]: v for k in ['a', 'b'] for v in [1, 2] }"); + let obj = try_object(&env, ty).expect("expected object"); + assert!( + obj.has_unknown, + "Object comprehension should produce open object" + ); + } + + #[test] + fn test_regular_object_is_closed() { + // Regular objects with explicit fields are closed + let (ty, env) = infer_doc("{ a: 1, b: 2 }"); + let obj = try_object(&env, ty).expect("expected object"); + assert!(!obj.has_unknown, "Regular object should be closed"); + assert_fields_ty(&obj, &["a", "b"]); + } + + #[test] + fn test_dynamic_field_object_is_open() { + let (ty, env) = infer_doc(r#"{ [("x" + "y")]: 1 }"#); + let obj = try_object(&env, ty).expect("expected object"); + assert!(obj.has_unknown, "Dynamic field object should be open"); + } + + fn try_function(env: &TypeEnv, ty: Ty) -> Option { + match env.store().get(ty) { + TyData::Function(func) => Some(func), + _ => None, + } + } + + #[test] + fn test_method_field_inference() { + // Methods should be inferred as functions + let (ty, env) = infer_doc("{ greet(name): name }"); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["greet"]); + + let field_def = get_field_ty(&obj, "greet").expect("Should have 'greet' field"); + let func = 
try_function(&env, field_def.ty).expect("expected function"); + assert_eq!( + func, + FunctionData { + params: vec![ParamInterned { + name: "name".to_string(), + ty: Ty::ANY, + has_default: false + }], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + } + ); + } + + #[test] + fn test_method_field_infers_body_return_type() { + let (ty, env) = infer_doc("{ inc(x):: x + 1 }"); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, "inc").expect("Should have 'inc' field"); + let func = try_function(&env, field_def.ty).expect("expected function"); + assert_eq!( + func, + FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + } + ); + } + + #[test] + fn test_method_field_with_assert_still_infers_body_return_type() { + let (ty, env) = infer_doc("{\n inc(x)::\n assert std.isNumber(x);\n x + 1,\n}"); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, "inc").expect("Should have 'inc' field"); + let func = try_function(&env, field_def.ty).expect("expected function"); + assert_eq!( + func, + FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + } + ); + } + + #[test] + fn test_object_local_binding_infers_field_type() { + let (ty, env) = infer_doc("{ local x = 1, z: x }"); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, "z").expect("Should have 'z' field"); + assert_eq!(field_def.ty, Ty::NUMBER); + } + + #[test] + fn test_object_local_function_binding_infers_field_type() { + let (ty, env) = infer_doc("{ local one() = 1, z: one() }"); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, "z").expect("Should have 'z' field"); + 
assert_eq!(field_def.ty, Ty::NUMBER); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/poly.rs b/crates/jrsonnet-lsp-inference/src/poly.rs new file mode 100644 index 00000000..8eeab153 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/poly.rs @@ -0,0 +1,559 @@ +//! Polymorphic type instantiation for function calls. + +use jrsonnet_lsp_types::{NumBounds, ReturnSpec, Ty, TyData, TyStore, TySubstitution}; + +/// Instantiate a polymorphic function call (Ty-native version). +/// +/// When calling a function that contains type variables in its parameter or return types, +/// this function builds a substitution by matching formal parameter types with actual +/// argument types, then applies that substitution to the return type. +/// +/// For example, if we have: +/// - Function type: `(T) -> Array` where T is a type variable +/// - Actual argument type: `Number` +/// +/// This function will: +/// 1. Build substitution: `{T -> Number}` +/// 2. Apply to return type: `Array` -> `Array` +/// 3. Return `Array` +/// +/// If the function has no type variables, the standard return type resolution is used. +pub fn instantiate_function_call_ty(func_ty: Ty, arg_types: &[Ty], store: &mut TyStore) -> Ty { + // Check if the function type has any type variables + if !store.has_type_vars(func_ty) { + // No type variables - resolve from return spec directly. 
+ if let TyData::Function(func_data) = store.get(func_ty).clone() { + return resolve_return_spec_ty( + &func_data.return_spec, + arg_types, + &TySubstitution::new(), + store, + ); + } + return Ty::ANY; + } + + // Get the function data + let TyData::Function(func_data) = store.get(func_ty).clone() else { + return Ty::ANY; + }; + + // Build a substitution by matching parameter types with argument types + let mut substitution = TySubstitution::new(); + + for (param, &arg_ty) in func_data.params.iter().zip(arg_types.iter()) { + collect_type_var_substitutions_ty(param.ty, arg_ty, &mut substitution, store); + } + + resolve_return_spec_ty(&func_data.return_spec, arg_types, &substitution, store) +} + +fn resolve_return_spec_ty( + return_spec: &ReturnSpec, + arg_types: &[Ty], + substitution: &TySubstitution, + store: &mut TyStore, +) -> Ty { + let applied_arg_ty = |idx: usize, store: &mut TyStore| { + arg_types + .get(idx) + .copied() + .map_or(Ty::ANY, |ty| store.apply_substitution(ty, substitution)) + }; + + match return_spec { + ReturnSpec::Fixed(ret) => store.apply_substitution(*ret, substitution), + ReturnSpec::SameAsArg(idx) => applied_arg_ty(*idx, store), + ReturnSpec::ArrayOfArg(idx) => { + let ty = applied_arg_ty(*idx, store); + store.array(ty) + } + ReturnSpec::ArrayWithSameElements(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Array { elem, .. } => store.array(elem), + TyData::Tuple { elems } => { + let elem_union = store.union(elems); + store.array(elem_union) + } + _ => store.array(Ty::ANY), + } + } + ReturnSpec::SetWithSameElements(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Array { elem, .. 
} => store.array_set(elem), + TyData::Tuple { elems } => { + let elem_union = store.union(elems); + store.array_set(elem_union) + } + _ => store.array_set(Ty::ANY), + } + } + ReturnSpec::FuncReturnType(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Function(func_data) => match func_data.return_spec { + ReturnSpec::Fixed(ret) => store.apply_substitution(ret, substitution), + _ => Ty::ANY, + }, + _ => Ty::ANY, + } + } + ReturnSpec::ArrayOfFuncReturn(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + let ret_ty = match store.get(arg_ty).clone() { + TyData::Function(func_data) => match func_data.return_spec { + ReturnSpec::Fixed(ret) => store.apply_substitution(ret, substitution), + _ => Ty::ANY, + }, + _ => Ty::ANY, + }; + store.array(ret_ty) + } + ReturnSpec::FlatMapResult(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Function(func_data) => match func_data.return_spec { + ReturnSpec::Fixed(ret) => { + let applied_ret = store.apply_substitution(ret, substitution); + match store.get(applied_ret).clone() { + TyData::Array { elem, .. } => store.array(elem), + _ => store.array(Ty::ANY), + } + } + _ => store.array(Ty::ANY), + }, + _ => store.array(Ty::ANY), + } + } + ReturnSpec::NonNegative => store.bounded_number(NumBounds::non_negative()), + ReturnSpec::ObjectValuesType(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Object(obj_data) => { + if obj_data.fields.is_empty() { + store.array(Ty::ANY) + } else { + let field_types: Vec = obj_data + .fields + .into_iter() + .map(|(_, field)| field.ty) + .collect(); + let field_union = store.union(field_types); + store.array(field_union) + } + } + _ => store.array(Ty::ANY), + } + } + } +} + +/// Collect type variable substitutions by matching a pattern type against a concrete type (Ty-native). 
+/// +/// This is a simple unification that collects substitutions for type variables. +/// It doesn't do full bidirectional unification - it just assigns concrete types +/// to type variables when the pattern contains a variable and the target is concrete. +pub fn collect_type_var_substitutions_ty( + pattern: Ty, + target: Ty, + substitution: &mut TySubstitution, + store: &mut TyStore, +) { + // Clone data to avoid borrow issues + let pattern_data = store.get(pattern).clone(); + let target_data = store.get(target).clone(); + + match (&pattern_data, &target_data) { + // Type variable matches anything - record the substitution + (TyData::TypeVar { id, constraints }, _) + if !matches!(target_data, TyData::TypeVar { .. }) => + { + // Check that the target satisfies constraints + if constraints.satisfied_by(target, store) { + // Occurs check: don't substitute if it creates infinite type + if !TySubstitution::occurs_in(*id, target, store) { + substitution.insert(*id, target); + } + } + } + + // Array types - recurse into element types + (TyData::Array { elem: pat_elem, .. }, TyData::Array { elem: tgt_elem, .. }) => { + collect_type_var_substitutions_ty(*pat_elem, *tgt_elem, substitution, store); + } + + // Tuple types - match element-wise + (TyData::Tuple { elems: pat_elems }, TyData::Tuple { elems: tgt_elems }) => { + for (pe, te) in pat_elems.iter().zip(tgt_elems.iter()) { + collect_type_var_substitutions_ty(*pe, *te, substitution, store); + } + } + + // Also handle Array vs Tuple (common case: Array matched against [1, 2, 3]) + (TyData::Array { elem: pat_elem, .. 
}, TyData::Tuple { elems: tgt_elems }) + if !tgt_elems.is_empty() => + { + let tuple_elem_union = store.union(tgt_elems.clone()); + collect_type_var_substitutions_ty(*pat_elem, tuple_elem_union, substitution, store); + } + + // Object types - match field types + (TyData::Object(pat_obj), TyData::Object(tgt_obj)) => { + for (field_name, pat_field) in &pat_obj.fields { + if let Some(tgt_field) = tgt_obj.fields.iter().find(|(n, _)| n == field_name) { + collect_type_var_substitutions_ty( + pat_field.ty, + tgt_field.1.ty, + substitution, + store, + ); + } + } + } + + // AttrsOf types - match value types + (TyData::AttrsOf { value: pat_val }, TyData::AttrsOf { value: tgt_val }) => { + collect_type_var_substitutions_ty(*pat_val, *tgt_val, substitution, store); + } + + // Function types - match param and return types + (TyData::Function(pat_fn), TyData::Function(tgt_fn)) => { + // Match parameter types + for (pp, tp) in pat_fn.params.iter().zip(tgt_fn.params.iter()) { + collect_type_var_substitutions_ty(pp.ty, tp.ty, substitution, store); + } + // Match return types + if let (ReturnSpec::Fixed(pat_ret), ReturnSpec::Fixed(tgt_ret)) = + (&pat_fn.return_spec, &tgt_fn.return_spec) + { + collect_type_var_substitutions_ty(*pat_ret, *tgt_ret, substitution, store); + } + } + + // Union types - try to match with each variant + (TyData::Union(pat_variants), _) => { + for &pv in pat_variants { + collect_type_var_substitutions_ty(pv, target, substitution, store); + } + } + + // All other cases - no substitution to collect + _ => {} + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_types::{FunctionData, ParamInterned, TyConstraints, TyVarId}; + + use super::*; + + #[test] + fn test_instantiate_ty_non_polymorphic() { + let mut store = TyStore::new(); + + // Create function(x: Number) -> String + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }], + return_spec: 
ReturnSpec::Fixed(Ty::STRING), + variadic: false, + })); + + let result = instantiate_function_call_ty(func_ty, &[Ty::NUMBER], &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_instantiate_ty_identity_function() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create function(x: T) -> T + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: t_var, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(t_var), + variadic: false, + })); + + let result = instantiate_function_call_ty(func_ty, &[Ty::NUMBER], &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_instantiate_ty_array_element() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create Array + let array_t = store.array(t_var); + + // Create function(arr: Array) -> T + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "arr".to_string(), + ty: array_t, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(t_var), + variadic: false, + })); + + // Call with Array + let array_string = store.array(Ty::STRING); + let result = instantiate_function_call_ty(func_ty, &[array_string], &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_instantiate_ty_map_function() { + let mut store = TyStore::new(); + + // Create type variables T and U + let t_id = TyVarId::fresh(); + let u_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + let u_var = store.type_var(u_id, TyConstraints::none()); + + // Create callback type: (T) -> U + let callback_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: t_var, + has_default: false, + }], 
+ return_spec: ReturnSpec::Fixed(u_var), + variadic: false, + })); + + // Create Array and Array + let array_t = store.array(t_var); + let array_u = store.array(u_var); + + // Create function(fn: (T) -> U, arr: Array) -> Array + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ + ParamInterned { + name: "fn".to_string(), + ty: callback_ty, + has_default: false, + }, + ParamInterned { + name: "arr".to_string(), + ty: array_t, + has_default: false, + }, + ], + return_spec: ReturnSpec::Fixed(array_u), + variadic: false, + })); + + // Create concrete callback: (Number) -> String + let concrete_callback = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::STRING), + variadic: false, + })); + + // Create Array + let array_number = store.array(Ty::NUMBER); + + // Instantiate + let result = + instantiate_function_call_ty(func_ty, &[concrete_callback, array_number], &mut store); + + // Should return Array + let expected = store.array(Ty::STRING); + assert_eq!(result, expected); + } + + #[test] + fn test_instantiate_ty_nested_arrays() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create Array> + let array_t = store.array(t_var); + let array_array_t = store.array(array_t); + + // Create function(arr: Array>) -> Array + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "arr".to_string(), + ty: array_array_t, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(array_t), + variadic: false, + })); + + // Call with Array> + let array_string = store.array(Ty::STRING); + let array_array_string = store.array(array_string); + let result = instantiate_function_call_ty(func_ty, &[array_array_string], &mut store); + + // Should return Array + let expected = 
store.array(Ty::STRING); + assert_eq!(result, expected); + } + + #[test] + fn test_ty_substitution_basic() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create substitution T -> Number + let mut sub = TySubstitution::new(); + collect_type_var_substitutions_ty(t_var, Ty::NUMBER, &mut sub, &mut store); + + assert_eq!(sub.get(t_id), Some(Ty::NUMBER)); + } + + #[test] + fn test_ty_substitution_array() { + let mut store = TyStore::new(); + + // Create type variable T and Array + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + let array_t = store.array(t_var); + + // Create Array + let array_string = store.array(Ty::STRING); + + // Collect substitutions + let mut sub = TySubstitution::new(); + collect_type_var_substitutions_ty(array_t, array_string, &mut sub, &mut store); + + assert_eq!(sub.get(t_id), Some(Ty::STRING)); + } + + #[test] + fn test_ty_substitution_with_constraints() { + let mut store = TyStore::new(); + + // Create type variable T with indexable constraint + let t_id = TyVarId::fresh(); + let t_var = store.type_var( + t_id, + TyConstraints { + must_be_indexable: true, + ..TyConstraints::none() + }, + ); + + // Number is not indexable - should not substitute + let mut sub = TySubstitution::new(); + collect_type_var_substitutions_ty(t_var, Ty::NUMBER, &mut sub, &mut store); + assert_eq!(sub.get(t_id), None); + + // Array IS indexable - should substitute + let array_num = store.array(Ty::NUMBER); + collect_type_var_substitutions_ty(t_var, array_num, &mut sub, &mut store); + assert_eq!(sub.get(t_id), Some(array_num)); + } + + #[test] + fn test_dynamic_return_spec_without_type_vars() { + let mut store = TyStore::new(); + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }], + return_spec: 
ReturnSpec::SameAsArg(0), + variadic: false, + })); + + let result = instantiate_function_call_ty(func_ty, &[Ty::NUMBER], &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_dynamic_return_spec_with_type_vars() { + let mut store = TyStore::new(); + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: t_var, + has_default: false, + }], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + })); + + let result = instantiate_function_call_ty(func_ty, &[Ty::STRING], &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_array_pattern_matches_tuple_as_union() { + let mut store = TyStore::new(); + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + let array_t = store.array(t_var); + let tuple_number_string = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + + let mut substitution = TySubstitution::new(); + collect_type_var_substitutions_ty( + array_t, + tuple_number_string, + &mut substitution, + &mut store, + ); + + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(substitution.get(t_id), Some(expected)); + } + + #[test] + fn test_ty_apply_substitution() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create Array + let array_t = store.array(t_var); + + // Create substitution T -> Number + let mut sub = TySubstitution::new(); + sub.insert(t_id, Ty::NUMBER); + + // Apply substitution + let result = store.apply_substitution(array_t, &sub); + + // Should be Array + let expected = store.array(Ty::NUMBER); + assert_eq!(result, expected); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs new file mode 100644 index 00000000..a8ef2e55 --- /dev/null 
+++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -0,0 +1,434 @@ +//! Type provider for cross-file type analysis. +//! +//! Provides type analysis with proper dependency handling, ensuring that +//! imports have their types resolved before analyzing the target file. + +use std::sync::Arc; + +use jrsonnet_lsp_document::{CanonicalPath, Document, FileId}; +use jrsonnet_lsp_import::{ImportGraph, ImportKind}; +use jrsonnet_lsp_types::GlobalTyStore; +use parking_lot::RwLock; +use rayon::prelude::*; + +use crate::{ + analysis::TypeAnalysis, + type_cache::{ + analyze_and_cache_file_with_resolved_import_map, CachingImportResolver, ResolvedImportMap, + SharedTypeCache, + }, +}; + +/// Trait for looking up documents by path. +/// +/// This allows `TypeProvider` to work with different document storage +/// implementations (e.g., `DocumentManager`, `DashMap`). +pub trait DocumentSource { + /// Get a document by interned file id, if it exists. + fn get_document_file(&self, file: FileId) -> Option; +} + +/// Provides type analysis with proper dependency handling. +/// +/// When analyzing a file, ensures all its imports are analyzed first +/// (in topological order) so that import types are available. +/// +/// # Example +/// +/// ```ignore +/// let provider = TypeProvider::new(type_cache, import_graph, global_types); +/// +/// // This ensures all dependencies are analyzed before the target file +/// let analysis = provider.analyze(&path, &doc, &doc_manager); +/// let ty = analysis.type_at_position(offset); +/// ``` +pub struct TypeProvider { + /// Type cache for storing analyzed types. + type_cache: SharedTypeCache, + /// Import graph for dependency information. + import_graph: Arc>, + /// Global type store. + global_types: Arc, +} + +impl TypeProvider { + /// Create a new type provider. 
+ pub fn new( + type_cache: SharedTypeCache, + import_graph: Arc>, + global_types: Arc, + ) -> Self { + Self { + type_cache, + import_graph, + global_types, + } + } + + /// Analyze a file with all its dependencies pre-analyzed. + /// + /// Uses topological ordering to ensure dependencies are analyzed first, + /// so that import types are available when analyzing the target file. + /// + /// The `doc_source` parameter provides access to documents for dependency analysis. + pub fn analyze( + &self, + path: &CanonicalPath, + doc: &Document, + doc_source: &D, + ) -> TypeAnalysis { + // Ensure dependencies are analyzed first (in topological order) + self.ensure_dependencies_analyzed(path, doc_source); + let resolved_imports = { + let graph = self.import_graph.read(); + graph + .file(path) + .map_or_else(ResolvedImportMap::default, |file| { + resolved_imports_for(&graph, file) + }) + }; + + // Analyze with import resolution + let import_resolver = Arc::new(CachingImportResolver::with_resolved_import_map( + resolved_imports, + Arc::clone(&self.type_cache), + )); + + TypeAnalysis::analyze_with_resolver(doc, Arc::clone(&self.global_types), import_resolver) + } + + /// Ensure all dependencies of a file are analyzed and cached. + /// + /// Uses topological processing to analyze dependencies before dependents. + fn ensure_dependencies_analyzed( + &self, + path: &CanonicalPath, + doc_source: &D, + ) { + let dependency_levels = { + let graph = self.import_graph.read(); + let Some(root) = graph.file(path) else { + return; + }; + + // Snapshot dependencies and resolved imports under the read lock, then + // release the lock before expensive analysis work. 
+ graph + .dependency_levels(root, |entry| entry.kind == ImportKind::Code) + .into_iter() + .map(|level| { + level + .into_iter() + .map(|dep_file| (dep_file, resolved_imports_for(&graph, dep_file))) + .collect::>() + }) + .collect::>() + }; + + for level in dependency_levels { + level + .into_par_iter() + .for_each(|(dep_file, resolved_imports)| { + let Some(doc) = doc_source.get_document_file(dep_file) else { + return; + }; + analyze_and_cache_file_with_resolved_import_map( + dep_file, + &doc, + &self.type_cache, + resolved_imports, + ); + }); + } + } + + /// Get the global type store. + #[must_use] + pub fn global_types(&self) -> &Arc { + &self.global_types + } + + /// Get the type cache. + #[must_use] + pub fn type_cache(&self) -> &SharedTypeCache { + &self.type_cache + } +} + +fn resolved_imports_for(graph: &ImportGraph, file: FileId) -> ResolvedImportMap { + graph.resolved_code_import_map_arc(file).unwrap_or_default() +} + +#[cfg(test)] +mod tests { + use dashmap::DashMap; + use jrsonnet_lsp_document::{CanonicalPath, DocVersion, FileId, PathResolver, PathStore}; + use jrsonnet_lsp_types::Ty; + + use super::*; + use crate::type_cache::new_shared_cache; + + /// Test document source backed by a `DashMap`. 
+ struct TestDocSource { + paths: PathStore, + resolver: PathResolver, + docs: DashMap, + } + + impl TestDocSource { + fn new(paths: PathStore) -> Self { + let resolver = paths.resolver(); + Self { + paths, + resolver, + docs: DashMap::new(), + } + } + + fn insert(&self, path: CanonicalPath, doc: Document) { + let file_id = self.paths.intern(&path); + self.docs.insert(file_id, doc); + } + } + + impl DocumentSource for TestDocSource { + fn get_document_file(&self, file: FileId) -> Option { + self.docs.get(&file).map(|r| r.clone()) + } + } + + impl TestDocSource { + fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) + } + } + + #[test] + fn test_provider_doc_source_file_lookup_round_trip() { + let path_store = PathStore::new(); + let doc_source = TestDocSource::new(path_store.clone()); + let path = test_path("roundtrip.jsonnet"); + let doc = Document::new("42".to_string(), DocVersion(1)); + doc_source.insert(path.clone(), doc.clone()); + + let file = path_store + .resolver() + .file(&path) + .expect("path should have been interned"); + let loaded = doc_source + .get_document_file(file) + .expect("document should be retrievable by file id"); + assert_eq!(loaded.text(), doc.text()); + assert_eq!(loaded.version(), doc.version()); + assert_eq!( + doc_source.path(file).map(|p| p.as_ref().clone()), + Some(path) + ); + } + + fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(std::path::PathBuf::from(format!("/test/{name}"))) + } + + #[test] + fn test_provider_analyze_simple() { + let global_types = Arc::new(GlobalTyStore::new()); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store))); + let doc_source = TestDocSource::new(PathStore::new()); + + let provider = TypeProvider::new(type_cache, import_graph, global_types); + + // Add a simple document + let path = test_path("simple.jsonnet"); + let doc = 
Document::new("42".to_string(), DocVersion(1)); + doc_source.insert(path.clone(), doc.clone()); + + // Analyze + let analysis = provider.analyze(&path, &doc, &doc_source); + let ty = analysis.document_type(); + + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_provider_ensures_dependencies_analyzed() { + let global_types = Arc::new(GlobalTyStore::new()); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let doc_source = TestDocSource::new(path_store.clone()); + + // Add imported file + let dep_path = test_path("dep.jsonnet"); + let dep_doc = Document::new("{ value: 42 }".to_string(), DocVersion(1)); + doc_source.insert(dep_path.clone(), dep_doc); + + // Add main file that imports dep + let main_path = test_path("main.jsonnet"); + let main_doc = Document::new("42".to_string(), DocVersion(1)); + doc_source.insert(main_path.clone(), main_doc.clone()); + + // Update import graph to show main imports dep + { + let mut graph = import_graph.write(); + let main_file = graph.intern(&main_path); + let dep_file = graph.intern(&dep_path); + graph.update_file_with_entries( + main_file, + vec![jrsonnet_lsp_import::ImportEntry { + kind: jrsonnet_lsp_import::ImportKind::Code, + import_path: "dep.jsonnet".to_string(), + resolved_file: Some(dep_file), + resolved_path: Some(dep_path.clone()), + binding_name: None, + }], + ); + } + + let provider = TypeProvider::new( + Arc::clone(&type_cache), + Arc::clone(&import_graph), + Arc::clone(&global_types), + ); + + // Before analyzing main, dep should not be in the cache + let dep_file = path_store + .resolver() + .file(&dep_path) + .expect("dependency should be interned in path store"); + assert!(type_cache.read().get(dep_file).is_none()); + + // Analyze main - this should trigger dependency analysis + let _analysis = provider.analyze(&main_path, &main_doc, &doc_source); + + // 
After analyzing main, dep should be in the cache + // (because ensure_dependencies_analyzed processes it first) + assert!(type_cache.read().get(dep_file).is_some()); + } + + #[test] + fn test_provider_uses_graph_resolved_import_paths() { + let global_types = Arc::new(GlobalTyStore::new()); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let doc_source = TestDocSource::new(path_store); + + let dep_path = test_path("deps/dep.jsonnet"); + let dep_doc = Document::new("42".to_string(), DocVersion(1)); + doc_source.insert(dep_path.clone(), dep_doc); + + let main_path = test_path("main.jsonnet"); + let main_doc = Document::new(r#"import "vendor/dep.jsonnet""#.to_string(), DocVersion(1)); + doc_source.insert(main_path.clone(), main_doc.clone()); + + { + let mut graph = import_graph.write(); + let main_file = graph.intern(&main_path); + let dep_file = graph.intern(&dep_path); + graph.update_file_with_entries( + main_file, + vec![jrsonnet_lsp_import::ImportEntry { + kind: jrsonnet_lsp_import::ImportKind::Code, + import_path: "vendor/dep.jsonnet".to_string(), + resolved_file: Some(dep_file), + resolved_path: Some(dep_path), + binding_name: None, + }], + ); + } + + let provider = TypeProvider::new( + Arc::clone(&type_cache), + Arc::clone(&import_graph), + Arc::clone(&global_types), + ); + + let analysis = provider.analyze(&main_path, &main_doc, &doc_source); + assert_eq!(analysis.document_type(), Ty::NUMBER); + } + + #[test] + fn test_provider_importstr_infers_string() { + let global_types = Arc::new(GlobalTyStore::new()); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let doc_source = TestDocSource::new(path_store); + + let main_path = test_path("main.jsonnet"); + 
let main_doc = Document::new(r#"importstr "./script.k""#.to_string(), DocVersion(1)); + doc_source.insert(main_path.clone(), main_doc.clone()); + + let script_path = test_path("script.k"); + + { + let mut graph = import_graph.write(); + let main_file = graph.intern(&main_path); + let script_file = graph.intern(&script_path); + graph.update_file_with_entries( + main_file, + vec![jrsonnet_lsp_import::ImportEntry { + kind: jrsonnet_lsp_import::ImportKind::String, + import_path: "./script.k".to_string(), + resolved_file: Some(script_file), + resolved_path: Some(script_path), + binding_name: None, + }], + ); + } + + let provider = TypeProvider::new( + Arc::clone(&type_cache), + Arc::clone(&import_graph), + Arc::clone(&global_types), + ); + + let analysis = provider.analyze(&main_path, &main_doc, &doc_source); + assert_eq!(analysis.document_type(), Ty::STRING); + } + + #[test] + fn test_provider_importbin_infers_bounded_byte_array() { + let global_types = Arc::new(GlobalTyStore::new()); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let doc_source = TestDocSource::new(path_store); + + let main_path = test_path("main.jsonnet"); + let main_doc = Document::new(r#"importbin "./script.k""#.to_string(), DocVersion(1)); + doc_source.insert(main_path.clone(), main_doc.clone()); + + let script_path = test_path("script.k"); + + { + let mut graph = import_graph.write(); + let main_file = graph.intern(&main_path); + let script_file = graph.intern(&script_path); + graph.update_file_with_entries( + main_file, + vec![jrsonnet_lsp_import::ImportEntry { + kind: jrsonnet_lsp_import::ImportKind::Binary, + import_path: "./script.k".to_string(), + resolved_file: Some(script_file), + resolved_path: Some(script_path), + binding_name: None, + }], + ); + } + + let provider = TypeProvider::new( + Arc::clone(&type_cache), + 
Arc::clone(&import_graph), + Arc::clone(&global_types), + ); + + let analysis = provider.analyze(&main_path, &main_doc, &doc_source); + let ty = analysis.document_type(); + assert!(analysis.is_array(ty)); + assert_eq!(analysis.display(ty), "array"); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/semantic_artifacts.rs b/crates/jrsonnet-lsp-inference/src/semantic_artifacts.rs new file mode 100644 index 00000000..c3a87632 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/semantic_artifacts.rs @@ -0,0 +1,340 @@ +//! Per-document semantic artifacts for request-time lookups. +//! +//! These artifacts are computed once per document version and reused across +//! definition/completion/references requests. + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_import::extract_import_path; +use jrsonnet_lsp_scope::{is_definition_site, is_variable_reference, ScopeIndex, ScopeResolver}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, Expr, ExprBase, ExprField, ForSpec, Param}, + AstNode, SyntaxKind, SyntaxToken, +}; +use rowan::{TextRange, TextSize}; +use rustc_hash::FxHashMap; + +use crate::{trace_expr, ConstEvalResult}; + +/// Binding kind used by semantic artifacts. +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub enum SemanticBindingKind { + LocalVariable, + LocalFunction, + Parameter, + ForVariable, +} + +/// Canonical import target for a definition. +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum SemanticImportTarget { + Import { path: String }, + ImportField { path: String, fields: Vec }, +} + +/// Cached info for a binding definition. +#[derive(Debug, Clone)] +pub struct DefinitionBindingInfo { + pub kind: SemanticBindingKind, + pub value_expr_range: Option, + pub alias_definition: Option, + pub import_target: Option, +} + +/// Visible binding at a position. 
+#[derive(Debug, Clone, Eq, PartialEq)] +pub struct SemanticVisibleBinding { + pub name: String, + pub kind: SemanticBindingKind, + pub range: TextRange, +} + +/// Per-file semantic index for fast request-time lookups. +pub struct SemanticArtifacts { + scope_index: ScopeIndex, + /// Map from identifier token start to definition name-range. + reference_to_definition: FxHashMap, + /// Map from definition name-range to all identifier token ranges (definition + refs). + references_by_definition: FxHashMap>, + /// Map from definition range to binding info. + definition_bindings: FxHashMap, + /// Map from definition range to binding kind. + binding_kinds: FxHashMap, + /// Map from identifier token start to smallest enclosing expression range. + expr_at_token_start: FxHashMap, + /// Map keyed by `(import_binding_name, field_name)` to field identifier ranges. + import_field_references: FxHashMap<(String, String), Vec>, +} + +impl SemanticArtifacts { + /// Build semantic artifacts for `document`. 
+ #[must_use] + pub fn build(document: &Document) -> Self { + let ast = document.ast(); + let root = ast.syntax(); + let scope_index = ScopeIndex::new(root); + let scope_resolver = ScopeResolver::new(root); + + let mut reference_to_definition = FxHashMap::default(); + let mut references_by_definition: FxHashMap> = + FxHashMap::default(); + let mut expr_at_token_start = FxHashMap::default(); + + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() != SyntaxKind::IDENT { + continue; + } + + let start = token.text_range().start(); + if let Some(expr_range) = smallest_expr_range_for_token(&token) { + expr_at_token_start.insert(start, expr_range); + } + + if is_definition_site(&token) { + if let Some(parent) = token.parent() { + references_by_definition + .entry(parent.text_range()) + .or_default() + .push(token.text_range()); + } + continue; + } + + if !is_variable_reference(&token) { + continue; + } + + if let Some(definition_range) = scope_resolver.get_definition(&token) { + reference_to_definition.insert(start, definition_range); + references_by_definition + .entry(definition_range) + .or_default() + .push(token.text_range()); + } + } + + for ranges in references_by_definition.values_mut() { + ranges.sort_by_key(|range| range.start()); + ranges.dedup(); + } + + let mut definition_bindings = FxHashMap::default(); + let mut binding_kinds = FxHashMap::default(); + + for bind in root.descendants().filter_map(Bind::cast) { + let Some((definition_range, kind, value_expr)) = bind_definition_data(&bind) else { + continue; + }; + + let value_expr_range = value_expr.as_ref().map(|expr| expr.syntax().text_range()); + let alias_definition = value_expr + .as_ref() + .and_then(|expr| alias_definition_for_expr(expr, &scope_index)); + let import_target = value_expr + .as_ref() + .and_then(|expr| import_target_for_expr(expr, document)); + + binding_kinds.insert(definition_range, kind); + definition_bindings.insert( + 
definition_range, + DefinitionBindingInfo { + kind, + value_expr_range, + alias_definition, + import_target, + }, + ); + } + + for param in root.descendants().filter_map(Param::cast) { + if let Some(range) = param_definition_range(¶m) { + binding_kinds.insert(range, SemanticBindingKind::Parameter); + } + } + + for for_spec in root.descendants().filter_map(ForSpec::cast) { + if let Some(range) = for_spec_definition_range(&for_spec) { + binding_kinds.insert(range, SemanticBindingKind::ForVariable); + } + } + + let mut import_field_references: FxHashMap<(String, String), Vec> = + FxHashMap::default(); + for field in root.descendants().filter_map(ExprField::cast) { + let Some(field_ident) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let Some(ExprBase::ExprVar(var)) = field.base().and_then(|expr| expr.expr_base()) + else { + continue; + }; + let Some(base_ident) = var.name().and_then(|name| name.ident_lit()) else { + continue; + }; + + import_field_references + .entry(( + base_ident.text().to_string(), + field_ident.text().to_string(), + )) + .or_default() + .push(field_ident.text_range()); + } + for ranges in import_field_references.values_mut() { + ranges.sort_by_key(|range| range.start()); + ranges.dedup(); + } + + Self { + scope_index, + reference_to_definition, + references_by_definition, + definition_bindings, + binding_kinds, + expr_at_token_start, + import_field_references, + } + } + + /// Resolve a definition range from an identifier token. 
+ pub fn definition_for_ident_token(&self, token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + if is_definition_site(token) { + return token.parent().map(|parent| parent.text_range()); + } + + if !is_variable_reference(token) { + return None; + } + + self.reference_to_definition + .get(&token.text_range().start()) + .copied() + .or_else(|| { + self.scope_index + .find_definition(token.text_range().start(), token.text()) + }) + } + + /// Return all identifier ranges for `definition_range`. + #[must_use] + pub fn references_for_definition(&self, definition_range: TextRange) -> &[TextRange] { + self.references_by_definition + .get(&definition_range) + .map_or(&[], Vec::as_slice) + } + + /// Return visible bindings at `position`. + pub fn visible_bindings_at(&self, position: TextSize) -> Vec { + self.scope_index + .bindings_at(position) + .into_iter() + .map(|(name, range)| SemanticVisibleBinding { + kind: self + .binding_kinds + .get(&range) + .copied() + .unwrap_or(SemanticBindingKind::LocalVariable), + name, + range, + }) + .collect() + } + + /// Return cached binding info for a definition range. + #[must_use] + pub fn binding_info(&self, definition_range: TextRange) -> Option<&DefinitionBindingInfo> { + self.definition_bindings.get(&definition_range) + } + + /// Return smallest cached expression range at token start position. + #[must_use] + pub fn expr_at_token_start(&self, token_start: TextSize) -> Option { + self.expr_at_token_start.get(&token_start).copied() + } + + /// Return cached `binding.field` identifier ranges in this file. 
+ #[must_use] + pub fn import_field_references(&self, binding_name: &str, field_name: &str) -> &[TextRange] { + self.import_field_references + .get(&(binding_name.to_string(), field_name.to_string())) + .map_or(&[], Vec::as_slice) + } +} + +fn smallest_expr_range_for_token(token: &SyntaxToken) -> Option { + token + .parent_ancestors() + .filter_map(Expr::cast) + .map(|expr| expr.syntax().text_range()) + .min_by_key(|range| range.len()) +} + +fn bind_definition_data(bind: &Bind) -> Option<(TextRange, SemanticBindingKind, Option)> { + match bind { + Bind::BindDestruct(bind) => { + let destruct = bind.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(( + full.name()?.syntax().text_range(), + SemanticBindingKind::LocalVariable, + bind.value(), + )) + } + Bind::BindFunction(bind) => Some(( + bind.name()?.syntax().text_range(), + SemanticBindingKind::LocalFunction, + bind.value(), + )), + } +} + +fn param_definition_range(param: &Param) -> Option { + let destruct = param.destruct()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) +} + +fn for_spec_definition_range(for_spec: &ForSpec) -> Option { + let destruct = for_spec.bind()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) +} + +fn alias_definition_for_expr(expr: &Expr, scope_index: &ScopeIndex) -> Option { + let ExprBase::ExprVar(var) = expr.expr_base()? else { + return None; + }; + let ident = var.name()?.ident_lit()?; + scope_index.find_definition(ident.text_range().start(), ident.text()) +} + +fn import_target_for_expr(expr: &Expr, document: &Document) -> Option { + if let ExprBase::ExprImport(import) = expr.expr_base()? { + return Some(SemanticImportTarget::Import { + path: extract_import_path(&import)?, + }); + } + + match trace_expr(expr, document)? 
{ + ConstEvalResult::Import { path, fields } => { + if fields.is_empty() { + Some(SemanticImportTarget::Import { path }) + } else { + Some(SemanticImportTarget::ImportField { path, fields }) + } + } + ConstEvalResult::Local { .. } | ConstEvalResult::Std { .. } => None, + } +} diff --git a/crates/jrsonnet-lsp-inference/src/suggestions.rs b/crates/jrsonnet-lsp-inference/src/suggestions.rs new file mode 100644 index 00000000..e71e1bd7 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/suggestions.rs @@ -0,0 +1,116 @@ +//! "Did you mean?" suggestions for error messages. +//! +//! Uses string similarity to suggest corrections for typos in field names, +//! variable names, and common mistakes. + +use strsim::jaro_winkler; + +/// Minimum similarity threshold for suggestions (0.0 to 1.0). +/// Jaro-Winkler scores above this are considered "similar enough" to suggest. +const SIMILARITY_THRESHOLD: f64 = 0.8; + +/// Find the best matching name from candidates. +/// +/// Returns the candidate with the highest similarity score above the threshold, +/// or `None` if no candidate is similar enough. +pub fn find_best_match<'a>( + name: &str, + candidates: impl IntoIterator, +) -> Option<&'a str> { + let mut best: Option<(&str, f64)> = None; + + for candidate in candidates { + let score = jaro_winkler(name, candidate); + if score >= SIMILARITY_THRESHOLD { + match best { + None => best = Some((candidate, score)), + Some((_, best_score)) if score > best_score => best = Some((candidate, score)), + _ => {} + } + } + } + + best.map(|(name, _)| name) +} + +/// Find all similar names from candidates, sorted by similarity (best first). 
+pub fn find_similar<'a>(name: &str, candidates: impl IntoIterator) -> Vec<&'a str> { + let mut matches: Vec<_> = candidates + .into_iter() + .map(|c| (c, jaro_winkler(name, c))) + .filter(|(_, score)| *score >= SIMILARITY_THRESHOLD) + .collect(); + + matches.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal)); + matches.into_iter().map(|(name, _)| name).collect() +} + +/// Common mistakes mapping for quick corrections. +/// +/// Maps common typos/mistakes to their correct Jsonnet equivalents. +#[must_use] +pub fn suggest_common_mistake(name: &str) -> Option<&'static str> { + match name { + // Boolean literals (from other languages) + "True" | "TRUE" => Some("true"), + "False" | "FALSE" => Some("false"), + + // Null variants + "None" | "nil" | "undefined" | "NULL" | "Null" => Some("null"), + + // Function keywords from other languages + "func" | "fn" | "def" | "lambda" => Some("function"), + + // Import variants + "require" | "include" => Some("import"), + + // Self reference + "this" => Some("self"), + + _ => None, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_find_best_match_exact() { + let candidates = ["foo", "bar", "baz"]; + assert_eq!(find_best_match("foo", candidates), Some("foo")); + } + + #[test] + fn test_find_best_match_typo() { + let candidates = ["length", "format", "type"]; + assert_eq!(find_best_match("lenght", candidates), Some("length")); + } + + #[test] + fn test_find_best_match_case_typo() { + let candidates = ["objectHas", "objectKeys", "objectValues"]; + assert_eq!(find_best_match("objecthas", candidates), Some("objectHas")); + } + + #[test] + fn test_find_best_match_no_match() { + let candidates = ["foo", "bar", "baz"]; + assert_eq!(find_best_match("completely_different", candidates), None); + } + + #[test] + fn test_find_similar_multiple() { + let candidates = ["objectHas", "objectKeys", "objectValues", "object"]; + let similar = find_similar("objectHs", candidates); + 
assert_eq!(similar.first(), Some(&"objectHas")); + } + + #[test] + fn test_common_mistakes() { + assert_eq!(suggest_common_mistake("True"), Some("true")); + assert_eq!(suggest_common_mistake("None"), Some("null")); + assert_eq!(suggest_common_mistake("this"), Some("self")); + assert_eq!(suggest_common_mistake("valid"), None); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs new file mode 100644 index 00000000..f8baebc1 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -0,0 +1,598 @@ +//! Cross-file type caching for improved import type resolution. +//! +//! This module provides a cache of inferred types for exported values (top-level expressions) +//! across files. When an import expression is encountered, we can look up the cached type +//! instead of returning `Any`. +//! +//! Uses an LRU cache to bound memory usage in large codebases. +//! +//! Types are stored in the shared [`GlobalTyStore`], enabling cross-file type sharing. + +use std::{num::NonZeroUsize, sync::Arc}; + +use jrsonnet_lsp_document::{ + CanonicalPath, Document, FileId, PathResolver, PathStore, DEFAULT_TYPE_CACHE_CAPACITY, +}; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; +use lru::LruCache; +use parking_lot::RwLock; +use rustc_hash::FxHashMap; + +use crate::analysis::TypeAnalysis; + +/// Shared import-path resolution map used during type analysis. +pub type ResolvedImportMap = Arc>; + +/// Cache of top-level types for documents. +/// +/// This stores the inferred type of each file's top-level expression, +/// enabling better type inference for imports. +/// +/// Uses an LRU eviction policy to bound memory usage. Types are stored +/// in the shared [`GlobalTyStore`], enabling cross-file type sharing. +#[derive(Debug)] +pub struct TypeCache { + /// LRU cache from file path to its cached type. + cache: LruCache, + /// Interned mapping between canonical paths and stable file ids. 
+ paths: PathStore, + /// Read-only resolver over interned mapping. + resolver: PathResolver, + /// Global type store for shared types. + global_types: Arc, +} + +/// A cached type entry with metadata. +#[derive(Debug, Clone)] +struct CachedType { + /// The interned type for this file's top-level expression. + ty: GlobalTy, + /// The document version when this type was cached. + version: i32, +} + +impl TypeCache { + /// Create a new empty type cache with default capacity. + pub fn new(global_types: Arc, paths: PathStore) -> Self { + Self::with_capacity(global_types, DEFAULT_TYPE_CACHE_CAPACITY, paths) + } + + /// Create a new type cache with the specified capacity. + pub fn with_capacity( + global_types: Arc, + capacity: usize, + paths: PathStore, + ) -> Self { + let capacity = NonZeroUsize::new(capacity).unwrap_or(NonZeroUsize::MIN); + let resolver = paths.resolver(); + Self { + cache: LruCache::new(capacity), + paths, + resolver, + global_types, + } + } + + /// Get a reference to the global type store. + #[must_use] + pub fn global_types(&self) -> &Arc { + &self.global_types + } + + /// Get or create the interned file for `path`. + #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.paths.intern(path) + } + + /// Resolve a file path to an interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.resolver.file(path) + } + + /// Borrow an interned file identifier's canonical path. + #[must_use] + pub fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) + } + + /// Get the cached type for a file, if available. + /// + /// Uses `peek` to avoid updating LRU order for read-only lookups. + #[must_use] + pub fn get(&self, file: FileId) -> Option { + self.cache.peek(&file).map(|cached| cached.ty) + } + + /// Get the cached type for a file and update LRU order. + /// + /// Use this when the lookup indicates actual usage of the cached type. 
+ pub fn get_and_touch(&mut self, file: FileId) -> Option { + self.cache.get(&file).map(|cached| cached.ty) + } + + /// Update the cache for a file. + /// + /// The type must be global, preventing accidental cross-analysis leakage. + pub fn update(&mut self, file: FileId, ty: GlobalTy, version: i32) { + self.cache.put(file, CachedType { ty, version }); + } + + /// Invalidate the cache for a file. + pub fn invalidate(&mut self, file: FileId) { + self.cache.pop(&file); + } + + /// Invalidate the cache for multiple files. + pub fn invalidate_many(&mut self, files: impl IntoIterator) { + for file in files { + self.invalidate(file); + } + } + + /// Check if a file's cache is up to date with the given version. + #[must_use] + pub fn is_up_to_date(&self, file: FileId, version: i32) -> bool { + self.cache + .peek(&file) + .is_some_and(|cached| cached.version == version) + } + + /// Get the number of cached entries. + #[must_use] + pub fn len(&self) -> usize { + self.cache.len() + } + + /// Check if the cache is empty. + #[must_use] + pub fn is_empty(&self) -> bool { + self.cache.is_empty() + } + + /// Clear all cached entries. + pub fn clear(&mut self) { + self.cache.clear(); + } +} + +/// Thread-safe shared type cache. +pub type SharedTypeCache = Arc>; + +/// Create a new shared type cache with the given global type store. +pub fn new_shared_cache(global_types: Arc, paths: PathStore) -> SharedTypeCache { + Arc::new(RwLock::new(TypeCache::new(global_types, paths))) +} + +/// Analyze a document and update the type cache. +/// +/// Returns the inferred top-level type as a global type. 
+pub fn analyze_and_cache( + path: &CanonicalPath, + doc: &Document, + cache: &SharedTypeCache, +) -> GlobalTy { + analyze_and_cache_with_resolved_imports(path, doc, cache, std::iter::empty()) +} + +pub(crate) fn analyze_and_cache_with_resolved_imports( + path: &CanonicalPath, + doc: &Document, + cache: &SharedTypeCache, + resolved_imports: I, +) -> GlobalTy +where + I: IntoIterator, +{ + let file = { + let read_cache = cache.read(); + read_cache.file(path) + }; + let file = file.unwrap_or_else(|| cache.write().intern(path)); + analyze_and_cache_file_with_resolved_imports(file, doc, cache, resolved_imports) +} + +pub(crate) fn analyze_and_cache_file_with_resolved_imports( + file: FileId, + doc: &Document, + cache: &SharedTypeCache, + resolved_imports: I, +) -> GlobalTy +where + I: IntoIterator, +{ + let resolved_imports = Arc::new(resolved_imports.into_iter().collect()); + analyze_and_cache_file_with_resolved_import_map(file, doc, cache, resolved_imports) +} + +pub(crate) fn analyze_and_cache_file_with_resolved_import_map( + file: FileId, + doc: &Document, + cache: &SharedTypeCache, + resolved_imports: ResolvedImportMap, +) -> GlobalTy { + let version = doc.version().0; + + // Check if we already have a cached type for this version + { + let read_cache = cache.read(); + if read_cache.is_up_to_date(file, version) { + if let Some(ty) = read_cache.get(file) { + return ty; + } + } + } + + // Get the global types from the cache + let global_types = { + let read_cache = cache.read(); + Arc::clone(read_cache.global_types()) + }; + + // Create an import resolver for cross-file type resolution + let import_resolver = Arc::new(CachingImportResolver::with_resolved_import_map( + resolved_imports, + Arc::clone(cache), + )); + + // Infer the type using the global store and import resolver + let analysis = TypeAnalysis::analyze_with_resolver(doc, global_types, import_resolver); + let ty = analysis.document_type_global(); + + // Cache the type (it's already in the global store) + 
{ + let mut write_cache = cache.write(); + write_cache.update(file, ty, version); + } + + ty +} + +/// Import resolver that looks up types from the type cache. +/// +/// Uses import paths that were already resolved by the import graph. +#[derive(Debug)] +pub struct CachingImportResolver { + /// Import path -> resolved file. + resolved_imports: ResolvedImportMap, + /// Type cache for looking up cached file types. + cache: SharedTypeCache, +} + +impl CachingImportResolver { + /// Create a new import resolver. + /// + /// # Arguments + /// * `resolved_imports` - Pre-resolved imports for the analyzed file + /// * `cache` - Shared type cache for looking up cached types + pub fn new(resolved_imports: I, cache: SharedTypeCache) -> Self + where + I: IntoIterator, + { + Self::with_resolved_import_map(Arc::new(resolved_imports.into_iter().collect()), cache) + } + + /// Create a resolver from a shared pre-built import map. + pub fn with_resolved_import_map( + resolved_imports: ResolvedImportMap, + cache: SharedTypeCache, + ) -> Self { + Self { + resolved_imports, + cache, + } + } + + fn resolved_file(&self, import_path: &str) -> Option { + self.resolved_imports.get(import_path).copied() + } +} + +impl crate::env::ImportResolver for CachingImportResolver { + fn resolve_import(&self, import_path: &str) -> Option { + let file = self.resolved_file(import_path)?; + let cache = self.cache.read(); + cache.get(file) + } +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeSet; + + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(std::path::PathBuf::from(format!("/test/{name}"))) + } + + fn test_global_store() -> Arc { + Arc::new(GlobalTyStore::new()) + } + + fn test_path_store() -> PathStore { + PathStore::new() + } + + fn cache_get(cache: &TypeCache, file: FileId) -> Option { + cache.get(file) + } + + fn cache_touch(cache: &mut TypeCache, file: FileId) -> Option { + cache.get_and_touch(file) + } + + 
fn cache_update(cache: &mut TypeCache, file: FileId, ty: GlobalTy, version: i32) { + cache.update(file, ty, version); + } + + fn cache_invalidate(cache: &mut TypeCache, file: FileId) { + cache.invalidate(file); + } + + fn cache_invalidate_many(cache: &mut TypeCache, files: impl IntoIterator) { + cache.invalidate_many(files); + } + + fn cache_is_up_to_date(cache: &TypeCache, file: FileId, version: i32) -> bool { + cache.is_up_to_date(file, version) + } + + /// Assert that the cache contains exactly the specified global type entries. + fn assert_cache_contents_ty(cache: &TypeCache, expected: &[(&str, GlobalTy)]) { + let actual: BTreeSet<_> = cache + .cache + .iter() + .filter_map(|(k, v)| { + cache + .path(*k) + .map(|path| (path.as_path().to_string_lossy().to_string(), v.ty)) + }) + .collect(); + let expected: BTreeSet<_> = expected + .iter() + .map(|(k, v)| (format!("/test/{k}"), *v)) + .collect(); + assert_eq!(actual, expected, "Cache contents mismatch"); + } + + #[test] + fn test_equivalent_path_lookup() { + let mut cache = TypeCache::new(test_global_store(), test_path_store()); + let path = test_path("main.jsonnet"); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); + + let lookup = test_path("main.jsonnet"); + let lookup_file = cache.file(&lookup).expect("lookup path should be interned"); + assert_eq!(cache_get(&cache, lookup_file), Some(GlobalTy::NUMBER)); + assert!(cache_is_up_to_date(&cache, lookup_file, 1)); + } + + #[test] + fn test_cache_basic_ty() { + let mut cache = TypeCache::new(test_global_store(), test_path_store()); + let path = test_path("main.jsonnet"); + + // Initially empty + assert_cache_contents_ty(&cache, &[]); + + // Add an entry + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); + assert_cache_contents_ty(&cache, &[("main.jsonnet", GlobalTy::NUMBER)]); + assert!(cache_is_up_to_date(&cache, file, 1)); + assert!(!cache_is_up_to_date(&cache, file, 2)); + + // Update 
the entry + cache_update(&mut cache, file, GlobalTy::STRING, 2); + assert_cache_contents_ty(&cache, &[("main.jsonnet", GlobalTy::STRING)]); + assert!(cache_is_up_to_date(&cache, file, 2)); + + // Invalidate + cache_invalidate(&mut cache, file); + assert_cache_contents_ty(&cache, &[]); + } + + #[test] + fn test_analyze_and_cache() { + let global_types = test_global_store(); + let cache = new_shared_cache(global_types, test_path_store()); + let path = test_path("test.jsonnet"); + let doc = Document::new("42".to_string(), DocVersion::new(1)); + + // First call should analyze and cache + let ty1 = analyze_and_cache(&path, &doc, &cache); + assert_eq!(ty1, GlobalTy::NUMBER); + // Verify cached value + assert_eq!( + { + let read_cache = cache.read(); + read_cache.file(&path).and_then(|file| read_cache.get(file)) + }, + Some(GlobalTy::NUMBER) + ); + + // Second call should return cached value + let ty2 = analyze_and_cache(&path, &doc, &cache); + assert_eq!(ty2, GlobalTy::NUMBER); + + // New version should re-analyze + let doc2 = Document::new("\"hello\"".to_string(), DocVersion::new(2)); + let ty3 = analyze_and_cache(&path, &doc2, &cache); + assert_eq!(ty3, GlobalTy::STRING); + assert_eq!( + { + let read_cache = cache.read(); + read_cache.file(&path).and_then(|file| read_cache.get(file)) + }, + Some(GlobalTy::STRING) + ); + } + + #[test] + fn test_multiple_files_ty() { + let mut cache = TypeCache::new(test_global_store(), test_path_store()); + + let path1 = test_path("file1.jsonnet"); + let path2 = test_path("file2.jsonnet"); + let file1 = cache.intern(&path1); + let file2 = cache.intern(&path2); + + cache_update(&mut cache, file1, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2, GlobalTy::STRING, 1); + + assert_cache_contents_ty( + &cache, + &[ + ("file1.jsonnet", GlobalTy::NUMBER), + ("file2.jsonnet", GlobalTy::STRING), + ], + ); + + cache_invalidate(&mut cache, file1); + assert_cache_contents_ty(&cache, &[("file2.jsonnet", GlobalTy::STRING)]); + } + + #[test] + 
fn test_invalidate_many_ty() { + let mut cache = TypeCache::new(test_global_store(), test_path_store()); + + let path1 = test_path("lib.jsonnet"); + let path2 = test_path("utils.jsonnet"); + let path3 = test_path("main.jsonnet"); + let path4 = test_path("other.jsonnet"); + let file1 = cache.intern(&path1); + let file2 = cache.intern(&path2); + let file3 = cache.intern(&path3); + let file4 = cache.intern(&path4); + + // Cache all files + cache_update(&mut cache, file1, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2, GlobalTy::STRING, 1); + cache_update(&mut cache, file3, GlobalTy::BOOL, 1); + cache_update(&mut cache, file4, GlobalTy::NULL, 1); + + assert_cache_contents_ty( + &cache, + &[ + ("lib.jsonnet", GlobalTy::NUMBER), + ("main.jsonnet", GlobalTy::BOOL), + ("other.jsonnet", GlobalTy::NULL), + ("utils.jsonnet", GlobalTy::STRING), + ], + ); + + // Invalidate multiple files (simulating cascading invalidation) + cache_invalidate_many(&mut cache, vec![file1, file2, file3]); + + // Only path4 should remain + assert_cache_contents_ty(&cache, &[("other.jsonnet", GlobalTy::NULL)]); + } + + #[test] + fn test_basic_get_update() { + let mut cache = TypeCache::new(test_global_store(), test_path_store()); + let path = test_path("test.jsonnet"); + let file = cache.intern(&path); + + // Update with a global type + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); + + // Get should return the same type + assert_eq!(cache_get(&cache, file), Some(GlobalTy::NUMBER)); + + // Version check + assert!(cache_is_up_to_date(&cache, file, 1)); + assert!(!cache_is_up_to_date(&cache, file, 2)); + } + + #[test] + fn test_global_store_access() { + let global_types = test_global_store(); + let mut cache = TypeCache::new(Arc::clone(&global_types), test_path_store()); + let path = test_path("test.jsonnet"); + let file = cache.intern(&path); + + // Cache a type - types are stored in the shared global store + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); + + // Verify we can 
retrieve the type + let retrieved = cache_get(&cache, file).unwrap(); + assert_eq!(retrieved, GlobalTy::NUMBER); + + // The cache's global_types should be the same reference + assert!(Arc::ptr_eq(cache.global_types(), &global_types)); + } + + #[test] + fn test_clear() { + let mut cache = TypeCache::new(test_global_store(), test_path_store()); + let path1 = test_path("a.jsonnet"); + let path2 = test_path("b.jsonnet"); + let file1 = cache.intern(&path1); + let file2 = cache.intern(&path2); + + cache_update(&mut cache, file1, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2, GlobalTy::STRING, 1); + + assert_eq!(cache.len(), 2); + assert!(!cache.is_empty()); + + cache.clear(); + + assert_eq!(cache.len(), 0); + assert!(cache.is_empty()); + assert_eq!(cache_get(&cache, file1), None); + } + + #[test] + fn test_lru_eviction() { + // Create a cache with capacity 3 + let mut cache = TypeCache::with_capacity(test_global_store(), 3, test_path_store()); + + let path1 = test_path("file1.jsonnet"); + let path2 = test_path("file2.jsonnet"); + let path3 = test_path("file3.jsonnet"); + let path4 = test_path("file4.jsonnet"); + let file1 = cache.intern(&path1); + let file2 = cache.intern(&path2); + let file3 = cache.intern(&path3); + let file4 = cache.intern(&path4); + + // Fill the cache + cache_update(&mut cache, file1, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2, GlobalTy::STRING, 1); + cache_update(&mut cache, file3, GlobalTy::BOOL, 1); + + assert_eq!(cache.len(), 3); + assert_eq!(cache_get(&cache, file1), Some(GlobalTy::NUMBER)); + assert_eq!(cache_get(&cache, file2), Some(GlobalTy::STRING)); + assert_eq!(cache_get(&cache, file3), Some(GlobalTy::BOOL)); + + // Access path1 to make it recently used (path2 is now least recently used) + let _ = cache_touch(&mut cache, file1); + + // Add a fourth entry - should evict path2 (LRU) + cache_update(&mut cache, file4, GlobalTy::NULL, 1); + + assert_eq!(cache.len(), 3); + assert_eq!(cache_get(&cache, file1), 
Some(GlobalTy::NUMBER)); // Still present (was touched) + assert_eq!(cache_get(&cache, file2), None); // Evicted (was LRU) + assert_eq!(cache_get(&cache, file3), Some(GlobalTy::BOOL)); // Still present + assert_eq!(cache_get(&cache, file4), Some(GlobalTy::NULL)); // Newly added + } + + #[test] + fn test_capacity_zero_falls_back_to_one() { + // Verify with_capacity(0) doesn't panic and has minimum capacity + let mut cache = TypeCache::with_capacity(test_global_store(), 0, test_path_store()); + let path = test_path("test.jsonnet"); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); + assert_eq!(cache_get(&cache, file), Some(GlobalTy::NUMBER)); + } +} diff --git a/crates/jrsonnet-lsp-scenario/Cargo.toml b/crates/jrsonnet-lsp-scenario/Cargo.toml new file mode 100644 index 00000000..3cade743 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "jrsonnet-lsp-scenario" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true + +[dependencies] +crossbeam-channel = "0.5" +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +lsp-server.workspace = true +lsp-types.workspace = true +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +serde_yaml_with_quirks.workspace = true +tempfile.workspace = true +thiserror.workspace = true +rowan.workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-scenario/src/fixture.rs b/crates/jrsonnet-lsp-scenario/src/fixture.rs new file mode 100644 index 00000000..e6a88bad --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/fixture.rs @@ -0,0 +1,60 @@ +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use lsp_server::Connection; +use tempfile::TempDir; +use thiserror::Error; + +use crate::{ + scenario_runner::{run_scenario, RunnerError}, + 
scenario_script::{parse_scenario_yaml, ParseScenarioError}, +}; + +#[derive(Debug, Error)] +pub enum ScenarioFixtureError { + #[error("create temp directory for scenario fixture: {source}")] + CreateTempDir { + #[source] + source: std::io::Error, + }, + #[error("read scenario fixture {path}: {source}")] + ReadFixture { + path: PathBuf, + #[source] + source: std::io::Error, + }, + #[error("parse scenario fixture {path}: {source}")] + ParseFixture { + path: PathBuf, + #[source] + source: Box, + }, + #[error(transparent)] + RunScenario(#[from] RunnerError), +} + +/// Parse and run a YAML fixture file against an in-memory LSP server. +/// +/// `start_server` receives the server-side `Connection` and should run the +/// server event loop until shutdown/exit. +pub fn run_yaml_fixture(path: &Path, start_server: S) -> Result<(), ScenarioFixtureError> +where + S: FnOnce(Connection) + Send + 'static, +{ + let base_dir = + TempDir::new().map_err(|source| ScenarioFixtureError::CreateTempDir { source })?; + let script = fs::read_to_string(path).map_err(|source| ScenarioFixtureError::ReadFixture { + path: path.to_path_buf(), + source, + })?; + let scenario = parse_scenario_yaml(&script, base_dir.path()).map_err(|source| { + ScenarioFixtureError::ParseFixture { + path: path.to_path_buf(), + source: Box::new(source), + } + })?; + run_scenario(&scenario, start_server)?; + Ok(()) +} diff --git a/crates/jrsonnet-lsp-scenario/src/lib.rs b/crates/jrsonnet-lsp-scenario/src/lib.rs new file mode 100644 index 00000000..680042a3 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/lib.rs @@ -0,0 +1,17 @@ +//! Scenario testing framework for Jsonnet LSP end-to-end tests. +//! +//! This crate provides: +//! - Strongly typed timeline scenario model ([`scenario`]) +//! - Human-readable YAML DSL parser ([`scenario_script`]) +//! - In-memory LSP request/notification runner ([`scenario_runner`]) +//! 
- Fixture helper for rstest file-based tests ([`run_yaml_fixture`]) + +pub mod fixture; +pub mod scenario; +pub mod scenario_runner; +pub mod scenario_script; +pub mod semantic_tokens; + +pub use fixture::{run_yaml_fixture, ScenarioFixtureError}; +pub use scenario_runner::{run_scenario, RunnerError}; +pub use scenario_script::{parse_scenario_yaml, ParseScenarioError}; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/diagnostics_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/diagnostics_steps.rs new file mode 100644 index 00000000..77e9344b --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/diagnostics_steps.rs @@ -0,0 +1,72 @@ +use super::*; + +/// Expected diagnostics notification for a URI. +/// +/// Asserts the full diagnostics payload for a file. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectDiagnosticsStep { + pub uri: String, + pub diagnostics: Vec, +} + +/// Barrier for "no new diagnostics arrive for idle_ms before timeout_ms". +/// +/// Waits until diagnostics traffic becomes idle. 
+/// +/// Example with defaults: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional custom timing: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// timeout_ms: 2000 +/// idle_ms: 100 +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct DiagnosticsSettledStep { + pub timeout_ms: u64, + pub idle_ms: u64, +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs b/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs new file mode 100644 index 00000000..e05123e8 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs @@ -0,0 +1,157 @@ +use crossbeam_channel::Sender; +use lsp_server::{Message, Notification, Response}; +use lsp_types::{ + notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, + request::{HoverRequest, Request as _}, + Hover, HoverContents, MarkedString, PublishDiagnosticsParams, +}; +use thiserror::Error; + +use super::Scenario; + +#[derive(Debug, Error)] +pub enum DoctestAssertionError { + #[error("create temp directory for scenario: {source}")] + CreateTempDirectory { + #[source] + source: std::io::Error, + }, + #[error("parse scenario yaml: {source}")] + ParseScenario { + #[source] + source: crate::ParseScenarioError, + }, + #[error("run scenario: {source}")] + RunScenario { 
+ #[source] + source: crate::RunnerError, + }, + #[error("serialize hover response payload: {source}")] + SerializeHoverResponse { + #[source] + source: serde_json::Error, + }, + #[error("deserialize didOpen notification payload: {source}")] + DeserializeDidOpenNotification { + #[source] + source: serde_json::Error, + }, + #[error("serialize publishDiagnostics payload: {source}")] + SerializePublishDiagnostics { + #[source] + source: serde_json::Error, + }, + #[error("send response to scenario runner")] + SendResponse, + #[error("send diagnostics notification to scenario runner")] + SendDiagnosticsNotification, +} + +pub fn assert_yaml_scenario_runs_without_error(yaml: &str) -> Result<(), DoctestAssertionError> { + let base_dir = tempfile::tempdir() + .map_err(|source| DoctestAssertionError::CreateTempDirectory { source })?; + let scenario = crate::parse_scenario_yaml(yaml, base_dir.path()) + .map_err(|source| DoctestAssertionError::ParseScenario { source })?; + assert_scenario_runs_without_error(&scenario) +} + +pub fn assert_scenario_runs_without_error( + scenario: &Scenario, +) -> Result<(), DoctestAssertionError> { + let (callback_error_tx, callback_error_rx) = crossbeam_channel::bounded(1); + let result = crate::run_scenario(scenario, move |connection| loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + match message { + Message::Request(request) => { + let response = match request.method.as_str() { + HoverRequest::METHOD => { + let hover = Hover { + contents: HoverContents::Array(vec![MarkedString::String( + "`number`".to_string(), + )]), + range: None, + }; + match serde_json::to_value(hover) { + Ok(result) => Response::new_ok(request.id, result), + Err(source) => { + send_callback_error( + &callback_error_tx, + DoctestAssertionError::SerializeHoverResponse { source }, + ); + break; + } + } + } + _ => Response { + id: request.id, + result: None, + error: None, + }, + }; + if 
connection.sender.send(Message::Response(response)).is_err() { + send_callback_error(&callback_error_tx, DoctestAssertionError::SendResponse); + break; + } + } + Message::Notification(notification) + if notification.method == DidOpenTextDocument::METHOD => + { + let params = match serde_json::from_value::( + notification.params, + ) { + Ok(params) => params, + Err(source) => { + send_callback_error( + &callback_error_tx, + DoctestAssertionError::DeserializeDidOpenNotification { source }, + ); + break; + } + }; + let publish = PublishDiagnosticsParams { + uri: params.text_document.uri, + version: Some(params.text_document.version), + diagnostics: vec![], + }; + let payload = match serde_json::to_value(publish) { + Ok(payload) => payload, + Err(source) => { + send_callback_error( + &callback_error_tx, + DoctestAssertionError::SerializePublishDiagnostics { source }, + ); + break; + } + }; + let publish_notification = + Notification::new(PublishDiagnostics::METHOD.to_string(), payload); + if connection + .sender + .send(Message::Notification(publish_notification)) + .is_err() + { + send_callback_error( + &callback_error_tx, + DoctestAssertionError::SendDiagnosticsNotification, + ); + break; + } + } + Message::Notification(notification) if notification.method == "exit" => break, + Message::Notification(_) | Message::Response(_) => {} + } + }); + if let Ok(error) = callback_error_rx.try_recv() { + return Err(error); + } + result.map_err(|source| DoctestAssertionError::RunScenario { source }) +} + +fn send_callback_error( + callback_error: &Sender, + error: DoctestAssertionError, +) { + let _ = callback_error.try_send(error); +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/document_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/document_steps.rs new file mode 100644 index 00000000..7394971f --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/document_steps.rs @@ -0,0 +1,186 @@ +use super::*; + +/// `textDocument/didOpen`. 
+/// +/// Opens a document in the scenario session. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: | +/// local x = 1; +/// x +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional fields: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// language_id: jsonnet +/// version: 3 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct OpenStep { + pub uri: String, + pub text: String, + pub language_id: String, + pub version: i32, +} + +/// `textDocument/didChange` full-document replacement. +/// +/// Replaces the full contents of an already-open document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: changeFull +/// file: main.jsonnet +/// text: "2" +/// version: 2 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChangeFullStep { + pub uri: String, + pub text: String, + pub version: i32, +} + +impl ChangeFullStep { + #[must_use] + pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { + TextDocumentContentChangeEvent { + range: None, + range_length: None, + text: self.text.clone(), + } + } +} + +/// `textDocument/didChange` incremental edit. +/// +/// Applies a range edit to an already-open document. 
+/// +/// Example using marker shorthand (`at` + `len`): +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: | +/// local [[target:1]] = 1; +/// target +/// - step: changeIncremental +/// file: main.jsonnet +/// at: target +/// len: 1 +/// text: "2" +/// version: 2 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChangeIncrementalStep { + pub uri: String, + pub range: Range, + pub text: String, + pub version: i32, +} + +impl ChangeIncrementalStep { + #[must_use] + pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { + TextDocumentContentChangeEvent { + range: Some(self.range), + range_length: None, + text: self.text.clone(), + } + } +} + +/// `textDocument/didSave`. +/// +/// Emits a save notification for an open document. +/// +/// Example without text payload: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: save +/// file: main.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional `text` payload: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: save +/// file: main.jsonnet +/// text: "{ answer: 42 }" +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SaveStep { + pub uri: String, + pub text: Option, +} + +/// `textDocument/didClose`. +/// +/// Closes a previously-open document. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: close +/// file: main.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CloseStep { + pub uri: String, +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs new file mode 100644 index 00000000..7aa5a328 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs @@ -0,0 +1,140 @@ +//! Canonical scenario model for multi-file, multi-step LSP timeline tests. +//! +//! YAML scenarios are parsed by [`crate::scenario_script::parse_scenario_yaml`] +//! and compiled into these strongly typed structures. +//! +//! This module is the compiled execution model; author scenarios in YAML. +//! +//! Example: +//! ```rust +//! use jrsonnet_lsp_scenario::{ +//! parse_scenario_yaml, +//! scenario::doctest_assertions::assert_scenario_runs_without_error, +//! }; +//! +//! let base_dir = tempfile::tempdir().expect("tempdir"); +//! +//! let yaml = r#" +//! steps: +//! - step: create +//! files: +//! main.jsonnet: |- +//! { answer: 42 } +//! open: [main.jsonnet] +//! - step: diagnosticsSettled +//! - step: expectDiagnostics +//! file: main.jsonnet +//! diagnostics: [] +//! "#; +//! +//! let actual = parse_scenario_yaml(yaml, base_dir.path()).expect("parse scenario"); +//! assert_scenario_runs_without_error(&actual); +//! 
``` + +mod diagnostics_steps; +mod document_steps; +mod request_steps; +mod workspace_steps; + +pub use diagnostics_steps::{DiagnosticsSettledStep, ExpectDiagnosticsStep}; +pub use document_steps::{ChangeFullStep, ChangeIncrementalStep, CloseStep, OpenStep, SaveStep}; +use lsp_types::{ + CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, + DocumentSymbolResponse, FileChangeType, GotoDefinitionResponse, InlayHint, Location, Position, + PrepareRenameResponse, Range, SemanticTokensRangeResult, SemanticTokensResult, SignatureHelp, + TextDocumentContentChangeEvent, TextEdit, WorkspaceEdit, WorkspaceSymbolResponse, +}; +pub use request_steps::{ + ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectCustomStep, + ExpectDeclarationStep, ExpectDefinitionStep, ExpectDocumentSymbolStep, + ExpectExecuteCodeLensStep, ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, + ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectRangeFormattingStep, ExpectReferencesStep, + ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, + ExpectSignatureHelpStep, ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, + HoverSectionExpectation, RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, + RequestCustomStep, RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, + RequestExecuteCodeLensStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, + RequestInlayHintsStep, RequestPrepareRenameStep, RequestRangeFormattingStep, + RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, + RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, + RequestWorkspaceSymbolStep, +}; +use serde::Deserialize; +pub use workspace_steps::{ + ConfigStep, DeleteFileStep, NotifyWatchedFilesStep, ScenarioFileChangeType, + WatchedFileChangeStep, WriteFileStep, +}; + +/// A full timeline scenario. 
+#[derive(Debug, Clone, PartialEq)] +pub struct Scenario { + pub steps: Vec, +} + +impl Scenario { + #[must_use] + pub fn new(steps: Vec) -> Self { + Self { steps } + } +} + +/// One timeline step. +#[derive(Debug, Clone, PartialEq)] +pub enum ScenarioStep { + Open(OpenStep), + ChangeFull(ChangeFullStep), + ChangeIncremental(ChangeIncrementalStep), + Save(SaveStep), + Close(CloseStep), + Config(ConfigStep), + WriteFile(WriteFileStep), + DeleteFile(DeleteFileStep), + NotifyWatchedFiles(NotifyWatchedFilesStep), + RequestCodeAction(RequestCodeActionStep), + ExpectCodeAction(ExpectCodeActionStep), + RequestReferences(RequestReferencesStep), + ExpectReferences(ExpectReferencesStep), + RequestDefinition(RequestDefinitionStep), + ExpectDefinition(ExpectDefinitionStep), + RequestDeclaration(RequestDeclarationStep), + ExpectDeclaration(ExpectDeclarationStep), + RequestTypeDefinition(RequestTypeDefinitionStep), + ExpectTypeDefinition(ExpectTypeDefinitionStep), + RequestPrepareRename(RequestPrepareRenameStep), + ExpectPrepareRename(ExpectPrepareRenameStep), + RequestRename(RequestRenameStep), + ExpectRename(ExpectRenameStep), + RequestHover(RequestHoverStep), + ExpectHover(ExpectHoverStep), + RequestSignatureHelp(RequestSignatureHelpStep), + ExpectSignatureHelp(ExpectSignatureHelpStep), + RequestCompletion(RequestCompletionStep), + ExpectCompletion(ExpectCompletionStep), + RequestFormatting(RequestFormattingStep), + ExpectFormatting(ExpectFormattingStep), + RequestRangeFormatting(RequestRangeFormattingStep), + ExpectRangeFormatting(ExpectRangeFormattingStep), + RequestSemanticTokensFull(RequestSemanticTokensFullStep), + ExpectSemanticTokensFull(ExpectSemanticTokensFullStep), + RequestSemanticTokensRange(RequestSemanticTokensRangeStep), + ExpectSemanticTokensRange(ExpectSemanticTokensRangeStep), + RequestInlayHints(RequestInlayHintsStep), + ExpectInlayHints(ExpectInlayHintsStep), + RequestDocumentSymbol(RequestDocumentSymbolStep), + 
ExpectDocumentSymbol(ExpectDocumentSymbolStep), + RequestWorkspaceSymbol(RequestWorkspaceSymbolStep), + ExpectWorkspaceSymbol(ExpectWorkspaceSymbolStep), + RequestCodeLens(RequestCodeLensStep), + ExpectCodeLens(ExpectCodeLensStep), + RequestExecuteCommand(RequestExecuteCommandStep), + ExpectExecuteCommand(ExpectExecuteCommandStep), + RequestExecuteCodeLens(RequestExecuteCodeLensStep), + ExpectExecuteCodeLens(ExpectExecuteCodeLensStep), + RequestCustom(RequestCustomStep), + ExpectCustom(ExpectCustomStep), + ExpectDiagnostics(ExpectDiagnosticsStep), + DiagnosticsSettled(DiagnosticsSettledStep), +} + +#[doc(hidden)] +pub mod doctest_assertions; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs new file mode 100644 index 00000000..62ef0575 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs @@ -0,0 +1,1298 @@ +use serde::Deserialize; + +use super::*; + +/// `textDocument/codeAction` request. +/// +/// Requests code actions for a range in a document. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestCodeAction +/// as: actions +/// file: main.jsonnet +/// range: m +/// - step: expectCodeAction +/// request: actions +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional filters: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestCodeAction +/// as: actions +/// file: main.jsonnet +/// range: m +/// diagnostics: [] +/// only: [quickfix] +/// - step: expectCodeAction +/// request: actions +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestCodeActionStep { + pub(crate) id: i32, + pub uri: String, + pub range: Range, + pub diagnostics: Vec, + pub only: Option>, +} + +/// Expected `textDocument/codeAction` response. +/// +/// Asserts the response for a preceding `requestCodeAction`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestCodeAction +/// as: actions +/// file: main.jsonnet +/// range: m +/// - step: expectCodeAction +/// request: actions +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectCodeActionStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `textDocument/references` request. +/// +/// Requests symbol references at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestReferences +/// as: refs +/// file: main.jsonnet +/// at: m +/// - step: expectReferences +/// request: refs +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional `include_declaration`: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestReferences +/// as: refs +/// file: main.jsonnet +/// at: m +/// include_declaration: true +/// - step: expectReferences +/// request: refs +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestReferencesStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, + pub include_declaration: bool, +} + +/// Expected `textDocument/references` response. +/// +/// Asserts the response for a preceding `requestReferences`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestReferences +/// as: refs +/// file: main.jsonnet +/// at: m +/// - step: expectReferences +/// request: refs +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectReferencesStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `textDocument/definition` request. +/// +/// Requests definition locations at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestDefinition +/// as: def +/// file: main.jsonnet +/// at: m +/// - step: expectDefinition +/// request: def +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestDefinitionStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/definition` response. +/// +/// Asserts the response for a preceding `requestDefinition`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestDefinition +/// as: def +/// file: main.jsonnet +/// at: m +/// - step: expectDefinition +/// request: def +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectDefinitionStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/declaration` request. +/// +/// Requests declaration locations at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestDeclaration +/// as: decl +/// file: main.jsonnet +/// at: m +/// - step: expectDeclaration +/// request: decl +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestDeclarationStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/declaration` response. +/// +/// Asserts the response for a preceding `requestDeclaration`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestDeclaration +/// as: decl +/// file: main.jsonnet +/// at: m +/// - step: expectDeclaration +/// request: decl +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectDeclarationStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/typeDefinition` request. +/// +/// Requests type-definition locations at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestTypeDefinition +/// as: type_def +/// file: main.jsonnet +/// at: m +/// - step: expectTypeDefinition +/// request: type_def +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestTypeDefinitionStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/typeDefinition` response. +/// +/// Asserts the response for a preceding `requestTypeDefinition`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestTypeDefinition +/// as: type_def +/// file: main.jsonnet +/// at: m +/// - step: expectTypeDefinition +/// request: type_def +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectTypeDefinitionStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/prepareRename` request. +/// +/// Requests rename-preparation data at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestPrepareRename +/// as: prep +/// file: main.jsonnet +/// at: m +/// - step: expectPrepareRename +/// request: prep +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestPrepareRenameStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/prepareRename` response. +/// +/// Asserts the response for a preceding `requestPrepareRename`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestPrepareRename +/// as: prep +/// file: main.jsonnet +/// at: m +/// - step: expectPrepareRename +/// request: prep +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectPrepareRenameStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/rename` request. +/// +/// Requests a rename operation. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestRename +/// as: rename +/// file: main.jsonnet +/// at: m +/// new_name: renamed +/// - step: expectRename +/// request: rename +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestRenameStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, + pub new_name: String, +} + +/// Expected `textDocument/rename` response. +/// +/// Asserts the response for a preceding `requestRename`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestRename +/// as: rename +/// file: main.jsonnet +/// at: m +/// new_name: renamed +/// - step: expectRename +/// request: rename +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectRenameStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/hover` request. +/// +/// Requests hover information at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestHover +/// as: hover +/// file: main.jsonnet +/// at: m +/// - step: expectHover +/// request: hover +/// result: +/// - type: number +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestHoverStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/hover` response. +/// +/// Asserts the response for a preceding `requestHover`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestHover +/// as: hover +/// file: main.jsonnet +/// at: m +/// - step: expectHover +/// request: hover +/// result: +/// - type: number +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectHoverStep { + pub(crate) id: i32, + /// Ordered hover sections rendered by the server. + /// + /// `null` means "expect no hover response". + pub result: Option>, +} + +/// One expected hover section, compared in authored order using exact matching. +/// +/// Exactly one key should be present per list item. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +pub enum HoverSectionExpectation { + Type { + #[serde(rename = "type")] + ty: String, + }, + Context { + context: String, + }, + Preview { + preview: HoverCodeBlockExpectation, + }, + Docs { + docs: HoverDocsExpectation, + }, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct HoverCodeBlockExpectation { + pub language: String, + pub value: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +pub enum HoverDocsExpectation { + Markdown(String), + Code(HoverCodeBlockExpectation), +} + +impl HoverSectionExpectation { + #[must_use] + pub fn to_markdown(&self) -> String { + match self { + Self::Type { ty } => format!("`{ty}`"), + Self::Context { context } => context.clone(), + Self::Preview { preview } => preview.value.clone(), + Self::Docs { docs } => match docs { + HoverDocsExpectation::Markdown(markdown) => markdown.clone(), + HoverDocsExpectation::Code(code) => { + format!("```{}\n{}\n```", code.language, code.value) + } + }, + } + } + + #[must_use] + pub fn to_marked_string(&self) -> lsp_types::MarkedString { + match self { + Self::Preview { preview } => { + lsp_types::MarkedString::LanguageString(lsp_types::LanguageString { + language: preview.language.clone(), + value: preview.value.clone(), + }) + } + Self::Docs { + docs: HoverDocsExpectation::Code(code), + } => lsp_types::MarkedString::LanguageString(lsp_types::LanguageString { + language: code.language.clone(), + value: code.value.clone(), + }), + Self::Type { .. } + | Self::Context { .. } + | Self::Docs { + docs: HoverDocsExpectation::Markdown(_), + } => lsp_types::MarkedString::String(self.to_markdown()), + } + } +} + +/// `textDocument/signatureHelp` request. +/// +/// Requests signature help at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local f(a) = a; f(((sig:|))1)" +/// - step: requestSignatureHelp +/// as: sig +/// file: main.jsonnet +/// at: sig +/// - step: expectSignatureHelp +/// request: sig +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestSignatureHelpStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/signatureHelp` response. +/// +/// Asserts the response for a preceding `requestSignatureHelp`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local f(a) = a; f(((sig:|))1)" +/// - step: requestSignatureHelp +/// as: sig +/// file: main.jsonnet +/// at: sig +/// - step: expectSignatureHelp +/// request: sig +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectSignatureHelpStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/completion` request. +/// +/// Requests completion items at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "std.((site:|))" +/// - step: requestCompletion +/// as: completion +/// file: main.jsonnet +/// at: site +/// - step: expectCompletion +/// request: completion +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestCompletionStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/completion` response. +/// +/// Asserts the response for a preceding `requestCompletion`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "std.((site:|))" +/// - step: requestCompletion +/// as: completion +/// file: main.jsonnet +/// at: site +/// - step: expectCompletion +/// request: completion +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectCompletionStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/formatting` request. +/// +/// Requests formatting edits for a document. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{x:1}" +/// - step: requestFormatting +/// as: formatting +/// file: main.jsonnet +/// - step: expectFormatting +/// request: formatting +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional formatting options: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{x:1}" +/// - step: requestFormatting +/// as: formatting +/// file: main.jsonnet +/// tab_size: 4 +/// insert_spaces: false +/// trim_final_newlines: true +/// insert_final_newline: true +/// - step: expectFormatting +/// request: formatting +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestFormattingStep { + pub(crate) id: i32, + pub uri: String, + pub tab_size: u32, + pub insert_spaces: bool, + pub trim_trailing_whitespace: Option, + pub insert_final_newline: Option, + pub trim_final_newlines: Option, +} + +/// Expected `textDocument/formatting` response. +/// +/// Asserts the response for a preceding `requestFormatting`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{x:1}" +/// - step: requestFormatting +/// as: formatting +/// file: main.jsonnet +/// - step: expectFormatting +/// request: formatting +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectFormattingStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `textDocument/rangeFormatting` request. +/// +/// Requests formatting edits for a selected range in a document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ [[f:x]]:1 }" +/// - step: requestRangeFormatting +/// as: formattingRange +/// file: main.jsonnet +/// range: f +/// - step: expectRangeFormatting +/// request: formattingRange +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestRangeFormattingStep { + pub(crate) id: i32, + pub uri: String, + pub range: Range, + pub tab_size: u32, + pub insert_spaces: bool, + pub trim_trailing_whitespace: Option, + pub insert_final_newline: Option, + pub trim_final_newlines: Option, +} + +/// Expected `textDocument/rangeFormatting` response. +/// +/// Asserts the response for a preceding `requestRangeFormatting`. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectRangeFormattingStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `textDocument/semanticTokens/full` request. +/// +/// Requests semantic tokens for the whole document. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestSemanticTokensFull +/// as: tokens +/// file: main.jsonnet +/// - step: expectSemanticTokensFull +/// request: tokens +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestSemanticTokensFullStep { + pub(crate) id: i32, + pub uri: String, +} + +/// Expected `textDocument/semanticTokens/full` response. +/// +/// Asserts the response for a preceding `requestSemanticTokensFull`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestSemanticTokensFull +/// as: tokens +/// file: main.jsonnet +/// - step: expectSemanticTokensFull +/// request: tokens +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectSemanticTokensFullStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/semanticTokens/range` request. +/// +/// Requests semantic tokens for a selected range. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestSemanticTokensRange +/// as: tokens +/// file: main.jsonnet +/// range: name +/// - step: expectSemanticTokensRange +/// request: tokens +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestSemanticTokensRangeStep { + pub(crate) id: i32, + pub uri: String, + pub range: Range, +} + +/// Expected `textDocument/semanticTokens/range` response. +/// +/// Asserts the response for a preceding `requestSemanticTokensRange`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestSemanticTokensRange +/// as: tokens +/// file: main.jsonnet +/// range: name +/// - step: expectSemanticTokensRange +/// request: tokens +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectSemanticTokensRangeStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/inlayHint` request. +/// +/// Requests inlay hints for a range. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestInlayHints +/// as: hints +/// file: main.jsonnet +/// range: name +/// - step: expectInlayHints +/// request: hints +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestInlayHintsStep { + pub(crate) id: i32, + pub uri: String, + pub range: Range, +} + +/// Expected `textDocument/inlayHint` response. +/// +/// Asserts the response for a preceding `requestInlayHints`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestInlayHints +/// as: hints +/// file: main.jsonnet +/// range: name +/// - step: expectInlayHints +/// request: hints +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone)] +pub struct ExpectInlayHintsStep { + pub(crate) id: i32, + pub result: Option>, +} + +impl PartialEq for ExpectInlayHintsStep { + fn eq(&self, other: &Self) -> bool { + if self.id != other.id { + return false; + } + serde_json::to_value(&self.result).ok() == serde_json::to_value(&other.result).ok() + } +} + +/// `textDocument/documentSymbol` request. +/// +/// Requests document symbol outline data. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ local x = 1, y: x }" +/// - step: requestDocumentSymbol +/// as: symbols +/// file: main.jsonnet +/// - step: expectDocumentSymbol +/// request: symbols +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestDocumentSymbolStep { + pub(crate) id: i32, + pub uri: String, +} + +/// Expected `textDocument/documentSymbol` response. +/// +/// Asserts the response for a preceding `requestDocumentSymbol`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ local x = 1, y: x }" +/// - step: requestDocumentSymbol +/// as: symbols +/// file: main.jsonnet +/// - step: expectDocumentSymbol +/// request: symbols +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectDocumentSymbolStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `workspace/symbol` request. +/// +/// Requests workspace-wide symbol search results. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestWorkspaceSymbol +/// as: symbols +/// query: mySymbol +/// - step: expectWorkspaceSymbol +/// request: symbols +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestWorkspaceSymbolStep { + pub(crate) id: i32, + pub query: String, +} + +/// Expected `workspace/symbol` response. 
+/// +/// Asserts the response for a preceding `requestWorkspaceSymbol`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestWorkspaceSymbol +/// as: symbols +/// query: mySymbol +/// - step: expectWorkspaceSymbol +/// request: symbols +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectWorkspaceSymbolStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/codeLens` request. +/// +/// Requests code lenses for a document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ x: 1 }" +/// - step: requestCodeLens +/// as: lenses +/// file: main.jsonnet +/// - step: expectCodeLens +/// request: lenses +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestCodeLensStep { + pub(crate) id: i32, + pub uri: String, +} + +/// Expected `textDocument/codeLens` response. +/// +/// Asserts the response for a preceding `requestCodeLens`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ x: 1 }" +/// - step: requestCodeLens +/// as: lenses +/// file: main.jsonnet +/// - step: expectCodeLens +/// request: lenses +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectCodeLensStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `workspace/executeCommand` request. 
+/// +/// Requests execution of a workspace command. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestExecuteCommand +/// as: command +/// command: jsonnet.test.command +/// - step: expectExecuteCommand +/// request: command +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional command arguments: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestExecuteCommand +/// as: command +/// command: jsonnet.test.command +/// arguments: [1, "arg"] +/// - step: expectExecuteCommand +/// request: command +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestExecuteCommandStep { + pub(crate) id: i32, + pub command: String, + pub arguments: Vec, +} + +/// Expected `workspace/executeCommand` response. +/// +/// Asserts the response for a preceding `requestExecuteCommand`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestExecuteCommand +/// as: command +/// command: jsonnet.test.command +/// - step: expectExecuteCommand +/// request: command +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectExecuteCommandStep { + pub(crate) id: i32, + pub result: Option, +} + +/// Execute one command from a prior `requestCodeLens` response. +/// +/// This step pulls the command at `index` from the referenced code-lens +/// response and dispatches it via `workspace/executeCommand`. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestExecuteCodeLensStep { + pub(crate) id: i32, + pub code_lens_request_id: i32, + pub index: usize, +} + +/// Expected response for a preceding `requestExecuteCodeLens`. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectExecuteCodeLensStep { + pub(crate) id: i32, + pub result: Option, +} + +/// Generic custom request for non-standard methods. +/// +/// Sends an arbitrary request method and JSON params payload. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestCustomStep { + pub(crate) id: i32, + pub method: String, + pub params: serde_json::Value, +} + +/// Expected response for a preceding `requestCustom`. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectCustomStep { + pub(crate) id: i32, + pub result: Option, +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/workspace_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/workspace_steps.rs new file mode 100644 index 00000000..226c659a --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/workspace_steps.rs @@ -0,0 +1,148 @@ +use super::*; + +/// `workspace/didChangeConfiguration`. +/// +/// Pushes configuration updates to the server. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: config +/// settings: +/// jsonnet: +/// diagnostics: +/// maxProblems: 200 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ConfigStep { + pub settings: serde_json::Value, +} + +/// Writes text to a file on disk. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: writeFile +/// path: libs/util.jsonnet +/// text: "{ x: 1 }" +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WriteFileStep { + pub path: String, + pub text: String, +} + +/// Deletes a file from disk. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: writeFile +/// path: libs/util.jsonnet +/// text: "{ x: 1 }" +/// - step: deleteFile +/// path: libs/util.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct DeleteFileStep { + pub path: String, +} + +/// File change kind for watched-files notifications. +/// +/// Example values: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/new.jsonnet +/// type: created +/// - path: vendor/existing.jsonnet +/// type: changed +/// - path: vendor/old.jsonnet +/// type: deleted +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum ScenarioFileChangeType { + Created, + Changed, + Deleted, +} + +impl ScenarioFileChangeType { + #[must_use] + pub const fn as_lsp(self) -> FileChangeType { + match self { + Self::Created => FileChangeType::CREATED, + Self::Changed => FileChangeType::CHANGED, + Self::Deleted => FileChangeType::DELETED, + } + } +} + +/// One watched-file change event. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/lib.jsonnet +/// type: changed +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WatchedFileChangeStep { + pub uri: String, + pub change_type: ScenarioFileChangeType, +} + +/// `workspace/didChangeWatchedFiles`. +/// +/// Emits a watched-files change notification. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/lib.jsonnet +/// type: created +/// - path: vendor/old.jsonnet +/// type: deleted +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct NotifyWatchedFilesStep { + pub changes: Vec, +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/document_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/document_steps.rs new file mode 100644 index 00000000..fc1bee2d --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/document_steps.rs @@ -0,0 +1,156 @@ +use std::{fs, path::PathBuf}; + +use lsp_types::{ + notification::{ + DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, + DidOpenTextDocument, DidSaveTextDocument, Notification as _, + }, + DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, FileEvent, + TextDocumentIdentifier, TextDocumentItem, +}; +use thiserror::Error; + +use super::{helpers::parse_uri, RunnerResult, ScenarioRunner}; +use crate::scenario::{ + ChangeFullStep, ChangeIncrementalStep, CloseStep, 
ConfigStep, DeleteFileStep, + NotifyWatchedFilesStep, OpenStep, SaveStep, WriteFileStep, +}; + +#[derive(Debug, Error)] +pub enum FilesystemError { + #[error("create parent directories for writeFile {path}: {source}")] + CreateParentDirs { + path: PathBuf, + #[source] + source: std::io::Error, + }, + #[error("writeFile {path} failed: {source}")] + WriteFile { + path: PathBuf, + #[source] + source: std::io::Error, + }, + #[error("deleteFile {path} failed: {source}")] + DeleteFile { + path: PathBuf, + #[source] + source: std::io::Error, + }, +} + +impl ScenarioRunner { + pub(super) fn step_open(&self, step: &OpenStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "open")?; + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri, + language_id: step.language_id.clone(), + version: step.version, + text: step.text.clone(), + }, + }; + self.send_notification_with_params(DidOpenTextDocument::METHOD, params, "didOpen") + } + + pub(super) fn step_change_full(&self, step: &ChangeFullStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "didChange(full)")?; + let params = DidChangeTextDocumentParams { + text_document: lsp_types::VersionedTextDocumentIdentifier { + uri, + version: step.version, + }, + content_changes: vec![step.as_change_event()], + }; + self.send_notification_with_params(DidChangeTextDocument::METHOD, params, "didChange(full)") + } + + pub(super) fn step_change_incremental(&self, step: &ChangeIncrementalStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "didChange(incremental)")?; + let params = DidChangeTextDocumentParams { + text_document: lsp_types::VersionedTextDocumentIdentifier { + uri, + version: step.version, + }, + content_changes: vec![step.as_change_event()], + }; + self.send_notification_with_params( + DidChangeTextDocument::METHOD, + params, + "didChange(incremental)", + ) + } + + pub(super) fn step_save(&self, step: &SaveStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, 
"didSave")?; + let params = DidSaveTextDocumentParams { + text_document: TextDocumentIdentifier { uri }, + text: step.text.clone(), + }; + self.send_notification_with_params(DidSaveTextDocument::METHOD, params, "didSave") + } + + pub(super) fn step_close(&self, step: &CloseStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "didClose")?; + let params = DidCloseTextDocumentParams { + text_document: TextDocumentIdentifier { uri }, + }; + self.send_notification_with_params(DidCloseTextDocument::METHOD, params, "didClose") + } + + pub(super) fn step_config(&self, step: &ConfigStep) -> RunnerResult<()> { + let params = DidChangeConfigurationParams { + settings: step.settings.clone(), + }; + self.send_notification_with_params( + DidChangeConfiguration::METHOD, + params, + "didChangeConfiguration", + ) + } + + pub(super) fn step_write_file(step: &WriteFileStep) -> RunnerResult<()> { + let path = PathBuf::from(&step.path); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).map_err(|source| FilesystemError::CreateParentDirs { + path: path.clone(), + source, + })?; + } + fs::write(&path, &step.text) + .map_err(|source| FilesystemError::WriteFile { path, source })?; + Ok(()) + } + + pub(super) fn step_delete_file(step: &DeleteFileStep) -> RunnerResult<()> { + let path = PathBuf::from(&step.path); + match fs::remove_file(&path) { + Ok(()) => Ok(()), + Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(()), + Err(source) => Err(FilesystemError::DeleteFile { path, source }.into()), + } + } + + pub(super) fn step_notify_watched_files( + &self, + step: &NotifyWatchedFilesStep, + ) -> RunnerResult<()> { + let changes = step + .changes + .iter() + .map(|change| { + let uri = parse_uri(&change.uri, "watched-files")?; + Ok::(FileEvent { + uri, + typ: change.change_type.as_lsp(), + }) + }) + .collect::, _>>()?; + let params = DidChangeWatchedFilesParams { changes }; + self.send_notification_with_params( + DidChangeWatchedFiles::METHOD, + params, 
+ "didChangeWatchedFiles", + ) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs new file mode 100644 index 00000000..3e37ca32 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs @@ -0,0 +1,73 @@ +use thiserror::Error; + +use super::{ + document_steps::FilesystemError, + expectation_steps::AssertionError, + helpers::UriError, + request_steps::RequestStepError, + transport::{RpcError, SerdeError, TransportError}, +}; + +#[derive(Debug, Error)] +pub enum RunnerError { + #[error(transparent)] + Uri(Box), + #[error(transparent)] + Serde(Box), + #[error(transparent)] + Transport(Box), + #[error(transparent)] + Rpc(Box), + #[error(transparent)] + Assertion(Box), + #[error(transparent)] + RequestStep(Box), + #[error(transparent)] + Filesystem(Box), + #[error("server thread panicked")] + ServerThreadPanicked, +} + +pub(super) type RunnerResult = Result; + +impl From for RunnerError { + fn from(error: UriError) -> Self { + Self::Uri(Box::new(error)) + } +} + +impl From for RunnerError { + fn from(error: SerdeError) -> Self { + Self::Serde(Box::new(error)) + } +} + +impl From for RunnerError { + fn from(error: TransportError) -> Self { + Self::Transport(Box::new(error)) + } +} + +impl From for RunnerError { + fn from(error: RpcError) -> Self { + Self::Rpc(Box::new(error)) + } +} + +impl From for RunnerError { + fn from(error: AssertionError) -> Self { + Self::Assertion(Box::new(error)) + } +} + +impl From for RunnerError { + fn from(error: RequestStepError) -> Self { + Self::RequestStep(Box::new(error)) + } +} + +impl From for RunnerError { + fn from(error: FilesystemError) -> Self { + Self::Filesystem(Box::new(error)) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs new file mode 100644 index 00000000..b9cc5dd1 --- /dev/null +++ 
b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs @@ -0,0 +1,392 @@ +use std::time::Duration; + +use serde::{de::DeserializeOwned, Serialize}; +use serde_json::Value; +use thiserror::Error; + +use super::{ + helpers::{hover_array_sections_from_json, json_mismatch_report, JsonMismatchReport}, + transport::{RpcError, SerdeError, TransportError}, + RunnerResult, ScenarioRunner, REQUEST_TIMEOUT, +}; +use crate::scenario::{ + DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, + ExpectCustomStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, + ExpectDocumentSymbolStep, ExpectExecuteCodeLensStep, ExpectExecuteCommandStep, + ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, + ExpectRangeFormattingStep, ExpectReferencesStep, ExpectRenameStep, + ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, + ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, +}; + +#[derive(Debug, Error)] +pub enum AssertionError { + #[error("hover response {id} had no result, expected hover section array")] + HoverMissingResult { id: i32 }, + #[error("hover response {id} had unsupported content shape: {hover}")] + HoverMalformedContent { id: i32, hover: String }, + #[error("diagnostics mismatch for uri {uri}\n{details}")] + DiagnosticsMismatch { + uri: String, + details: JsonMismatchReport, + }, +} + +impl ScenarioRunner { + fn mismatch_details(actual: &A, expected: &E) -> RunnerResult + where + A: Serialize, + E: Serialize, + { + let actual_json = + serde_json::to_value(actual).map_err(|source| SerdeError::SerializeValue { + context: "response mismatch actual", + source, + })?; + let expected_json = + serde_json::to_value(expected).map_err(|source| SerdeError::SerializeValue { + context: "response mismatch expected", + source, + })?; + Ok(json_mismatch_report(&actual_json, &expected_json)) + } + + fn response_mismatch( + method: &'static str, + id: 
i32, + actual: &A, + expected: &E, + ) -> RunnerResult<()> + where + A: Serialize, + E: Serialize, + { + Err(RpcError::ResponseMismatch { + method, + id, + details: Self::mismatch_details(actual, expected)?, + } + .into()) + } + + fn response_mismatch_json_values( + method: &'static str, + id: i32, + actual: &Value, + expected: &Value, + ) -> RunnerResult<()> { + Err(RpcError::ResponseMismatch { + method, + id, + details: json_mismatch_report(actual, expected), + } + .into()) + } + + fn expect_typed_response( + &mut self, + method: &'static str, + id: i32, + expected: &Option, + ) -> RunnerResult<()> + where + T: DeserializeOwned + PartialEq + Serialize, + { + let actual = self.response_result::(method, id)?; + if &actual != expected { + return Self::response_mismatch(method, id, &actual, expected); + } + Ok(()) + } + + pub(super) fn step_expect_code_action( + &mut self, + step: &ExpectCodeActionStep, + ) -> RunnerResult<()> { + let actual = + self.response_result::>("codeAction", step.id)?; + if actual != step.result { + return Self::response_mismatch("codeAction", step.id, &actual, &step.result); + } + Ok(()) + } + + pub(super) fn step_expect_references( + &mut self, + step: &ExpectReferencesStep, + ) -> RunnerResult<()> { + let actual = self.response_result::>("references", step.id)?; + if actual != step.result { + return Self::response_mismatch("references", step.id, &actual, &step.result); + } + Ok(()) + } + + pub(super) fn step_expect_definition( + &mut self, + step: &ExpectDefinitionStep, + ) -> RunnerResult<()> { + self.expect_typed_response("definition", step.id, &step.result) + } + + pub(super) fn step_expect_declaration( + &mut self, + step: &ExpectDeclarationStep, + ) -> RunnerResult<()> { + self.expect_typed_response("declaration", step.id, &step.result) + } + + pub(super) fn step_expect_type_definition( + &mut self, + step: &ExpectTypeDefinitionStep, + ) -> RunnerResult<()> { + self.expect_typed_response("typeDefinition", step.id, &step.result) + } + 
+ pub(super) fn step_expect_prepare_rename( + &mut self, + step: &ExpectPrepareRenameStep, + ) -> RunnerResult<()> { + self.expect_typed_response("prepareRename", step.id, &step.result) + } + + pub(super) fn step_expect_rename(&mut self, step: &ExpectRenameStep) -> RunnerResult<()> { + self.expect_typed_response("rename", step.id, &step.result) + } + + pub(super) fn step_expect_hover(&mut self, step: &ExpectHoverStep) -> RunnerResult<()> { + let actual = self.response_result::("hover", step.id)?; + match (&actual, &step.result) { + (None, None) => Ok(()), + (None, Some(_)) => Err(AssertionError::HoverMissingResult { id: step.id }.into()), + (Some(actual_hover), None) => { + Self::response_mismatch_json_values("hover", step.id, actual_hover, &Value::Null) + } + (Some(actual_hover), Some(expected_sections)) => { + let actual_sections = + hover_array_sections_from_json(actual_hover).ok_or_else(|| { + AssertionError::HoverMalformedContent { + id: step.id, + hover: actual_hover.to_string(), + } + })?; + let expected_sections = expected_sections + .iter() + .map(crate::scenario::HoverSectionExpectation::to_marked_string) + .collect::>(); + if actual_sections == expected_sections { + Ok(()) + } else { + Self::response_mismatch("hover", step.id, &actual_sections, &expected_sections) + } + } + } + } + + pub(super) fn step_expect_signature_help( + &mut self, + step: &ExpectSignatureHelpStep, + ) -> RunnerResult<()> { + self.expect_typed_response("signatureHelp", step.id, &step.result) + } + + pub(super) fn step_expect_completion( + &mut self, + step: &ExpectCompletionStep, + ) -> RunnerResult<()> { + self.expect_typed_response("completion", step.id, &step.result) + } + + pub(super) fn step_expect_formatting( + &mut self, + step: &ExpectFormattingStep, + ) -> RunnerResult<()> { + self.expect_typed_response("formatting", step.id, &step.result) + } + + pub(super) fn step_expect_range_formatting( + &mut self, + step: &ExpectRangeFormattingStep, + ) -> RunnerResult<()> { + 
self.expect_typed_response("rangeFormatting", step.id, &step.result) + } + + pub(super) fn step_expect_semantic_tokens_full( + &mut self, + step: &ExpectSemanticTokensFullStep, + ) -> RunnerResult<()> { + self.expect_typed_response("semanticTokens/full", step.id, &step.result) + } + + pub(super) fn step_expect_semantic_tokens_range( + &mut self, + step: &ExpectSemanticTokensRangeStep, + ) -> RunnerResult<()> { + self.expect_typed_response("semanticTokens/range", step.id, &step.result) + } + + pub(super) fn step_expect_inlay_hints( + &mut self, + step: &ExpectInlayHintsStep, + ) -> RunnerResult<()> { + let actual = self.response_result::>("inlayHints", step.id)?; + let actual_json = + serde_json::to_value(&actual).map_err(|source| SerdeError::SerializeValue { + context: "response mismatch actual", + source, + })?; + let expected_json = + serde_json::to_value(&step.result).map_err(|source| SerdeError::SerializeValue { + context: "response mismatch expected", + source, + })?; + if actual_json != expected_json { + return Self::response_mismatch_json_values( + "inlayHints", + step.id, + &actual_json, + &expected_json, + ); + } + Ok(()) + } + + pub(super) fn step_expect_document_symbol( + &mut self, + step: &ExpectDocumentSymbolStep, + ) -> RunnerResult<()> { + self.expect_typed_response("documentSymbol", step.id, &step.result) + } + + pub(super) fn step_expect_workspace_symbol( + &mut self, + step: &ExpectWorkspaceSymbolStep, + ) -> RunnerResult<()> { + self.expect_typed_response("workspaceSymbol", step.id, &step.result) + } + + pub(super) fn step_expect_code_lens(&mut self, step: &ExpectCodeLensStep) -> RunnerResult<()> { + self.expect_typed_response("codeLens", step.id, &step.result) + } + + pub(super) fn step_expect_execute_command( + &mut self, + step: &ExpectExecuteCommandStep, + ) -> RunnerResult<()> { + let response = self.wait_response(step.id, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: 
"executeCommand", + id: step.id, + error, + } + .into()); + } + if response.result != step.result { + return Self::response_mismatch( + "executeCommand", + step.id, + &response.result, + &step.result, + ); + } + Ok(()) + } + + pub(super) fn step_expect_custom(&mut self, step: &ExpectCustomStep) -> RunnerResult<()> { + let response = self.wait_response(step.id, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: "customRequest", + id: step.id, + error, + } + .into()); + } + if response.result != step.result { + return Self::response_mismatch( + "customRequest", + step.id, + &response.result, + &step.result, + ); + } + Ok(()) + } + + pub(super) fn step_expect_execute_code_lens( + &mut self, + step: &ExpectExecuteCodeLensStep, + ) -> RunnerResult<()> { + let response = self.wait_response(step.id, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: "executeCodeLens", + id: step.id, + error, + } + .into()); + } + if response.result != step.result { + return Self::response_mismatch( + "executeCodeLens", + step.id, + &response.result, + &step.result, + ); + } + Ok(()) + } + + pub(super) fn step_expect_diagnostics( + &mut self, + step: &ExpectDiagnosticsStep, + ) -> RunnerResult<()> { + let actual = self.wait_diagnostics_for_uri(&step.uri, REQUEST_TIMEOUT)?; + if actual.diagnostics != step.diagnostics { + let details = Self::mismatch_details(&actual.diagnostics, &step.diagnostics)?; + return Err(AssertionError::DiagnosticsMismatch { + uri: step.uri.clone(), + details, + } + .into()); + } + Ok(()) + } + + pub(super) fn step_diagnostics_settled( + &mut self, + step: DiagnosticsSettledStep, + ) -> RunnerResult<()> { + let timeout = Duration::from_millis(step.timeout_ms); + let idle = Duration::from_millis(step.idle_ms); + let start = std::time::Instant::now(); + let mut last_diagnostic = self.last_diagnostic_at.unwrap_or(start); + + loop { + if 
start.elapsed() > timeout { + return Err(TransportError::DiagnosticsDidNotSettle { timeout }.into()); + } + + if last_diagnostic.elapsed() >= idle { + return Ok(()); + } + + let remaining_timeout = timeout.saturating_sub(start.elapsed()); + let remaining_idle = idle.saturating_sub(last_diagnostic.elapsed()); + let wait_for = remaining_timeout.min(remaining_idle); + + match self.conn.receiver.recv_timeout(wait_for) { + Ok(message) => self.capture_background_message(message)?, + Err(crossbeam_channel::RecvTimeoutError::Timeout) => {} + Err(crossbeam_channel::RecvTimeoutError::Disconnected) => { + return Err(TransportError::DiagnosticsSettledDisconnected.into()); + } + } + + if let Some(latest) = self.last_diagnostic_at { + last_diagnostic = latest; + } + } + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs new file mode 100644 index 00000000..b091c7e9 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -0,0 +1,344 @@ +use std::{collections::BTreeSet, fmt}; + +use serde_json::Value; +use thiserror::Error; + +use super::RunnerResult; + +type UriParseError = ::Err; + +#[derive(Debug, Error)] +pub enum UriError { + #[error("parse {context} uri '{uri}': {source}")] + Parse { + context: &'static str, + uri: String, + source: UriParseError, + }, +} + +pub(super) fn parse_uri(uri: &str, context: &'static str) -> RunnerResult { + uri.parse::() + .map_err(|error| UriError::Parse { + context, + uri: uri.to_owned(), + source: error, + }) + .map_err(Into::into) +} + +pub(super) fn hover_array_sections_from_json( + hover: &Value, +) -> Option> { + let contents = hover.get("contents")?; + let sections = contents.as_array()?; + sections + .iter() + .cloned() + .map(serde_json::from_value) + .collect::, _>>() + .ok() +} + +const MAX_DIFF_LINES: usize = 20; +const MAX_VALUE_PREVIEW_CHARS: usize = 120; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum 
JsonDifference { + ValueMismatch { + path: String, + actual: Value, + expected: Value, + }, + MissingValue { + path: String, + expected: Value, + }, + UnexpectedValue { + path: String, + actual: Value, + }, + LengthMismatch { + path: String, + actual: usize, + expected: usize, + }, +} + +impl JsonDifference { + fn render(&self) -> String { + match self { + Self::ValueMismatch { + path, + actual, + expected, + } => format!( + "{path}: actual {} != expected {}", + preview_json_value(actual), + preview_json_value(expected) + ), + Self::MissingValue { path, expected } => { + format!( + "{path}: missing value, expected {}", + preview_json_value(expected) + ) + } + Self::UnexpectedValue { path, actual } => { + format!("{path}: unexpected value {}", preview_json_value(actual)) + } + Self::LengthMismatch { + path, + actual, + expected, + } => format!("{path}: length mismatch (actual {actual}, expected {expected})"), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct JsonMismatchReport { + differences: Vec, + truncated: bool, +} + +impl JsonMismatchReport { + fn new() -> Self { + Self { + differences: Vec::new(), + truncated: false, + } + } + + pub fn differences(&self) -> &[JsonDifference] { + &self.differences + } + + pub const fn truncated(&self) -> bool { + self.truncated + } +} + +impl fmt::Display for JsonMismatchReport { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.differences().is_empty() { + return f.write_str("values differ"); + } + + f.write_str("differences:\n")?; + for difference in self.differences() { + f.write_str(" - ")?; + f.write_str(&difference.render())?; + f.write_str("\n")?; + } + if self.truncated() { + f.write_str(" - ... 
additional differences omitted ...\n")?; + } + Ok(()) + } +} + +pub(super) fn json_mismatch_report(actual: &Value, expected: &Value) -> JsonMismatchReport { + let mut report = JsonMismatchReport::new(); + collect_json_differences("$", actual, expected, &mut report); + report +} + +fn collect_json_differences( + path: &str, + actual: &Value, + expected: &Value, + report: &mut JsonMismatchReport, +) { + if actual == expected || report.truncated { + return; + } + + match (actual, expected) { + (Value::Object(actual_object), Value::Object(expected_object)) => { + let keys: BTreeSet<&str> = actual_object + .keys() + .chain(expected_object.keys()) + .map(String::as_str) + .collect(); + for key in keys { + if report.truncated { + return; + } + let child_path = child_object_path(path, key); + match (actual_object.get(key), expected_object.get(key)) { + (Some(actual_value), Some(expected_value)) => { + collect_json_differences(&child_path, actual_value, expected_value, report); + } + (Some(actual_value), None) => push_difference( + report, + JsonDifference::UnexpectedValue { + path: child_path, + actual: actual_value.clone(), + }, + ), + (None, Some(expected_value)) => push_difference( + report, + JsonDifference::MissingValue { + path: child_path, + expected: expected_value.clone(), + }, + ), + (None, None) => {} + } + } + } + (Value::Array(actual_array), Value::Array(expected_array)) => { + if actual_array.len() != expected_array.len() { + push_difference( + report, + JsonDifference::LengthMismatch { + path: path.to_string(), + actual: actual_array.len(), + expected: expected_array.len(), + }, + ); + } + for (index, (actual_value, expected_value)) in + actual_array.iter().zip(expected_array.iter()).enumerate() + { + if report.truncated { + return; + } + let child_path = format!("{path}[{index}]"); + collect_json_differences(&child_path, actual_value, expected_value, report); + } + } + _ => push_difference( + report, + JsonDifference::ValueMismatch { + path: 
path.to_string(), + actual: actual.clone(), + expected: expected.clone(), + }, + ), + } +} + +fn push_difference(report: &mut JsonMismatchReport, difference: JsonDifference) { + if report.differences.len() >= MAX_DIFF_LINES { + report.truncated = true; + return; + } + report.differences.push(difference); + if report.differences.len() >= MAX_DIFF_LINES { + report.truncated = true; + } +} + +fn child_object_path(path: &str, key: &str) -> String { + if is_identifier_key(key) { + format!("{path}.{key}") + } else { + format!("{path}[{}]", serde_json::to_string(key).unwrap_or_default()) + } +} + +fn is_identifier_key(key: &str) -> bool { + let mut chars = key.chars(); + let Some(first) = chars.next() else { + return false; + }; + if !(first.is_ascii_alphabetic() || first == '_') { + return false; + } + chars.all(|ch| ch.is_ascii_alphanumeric() || ch == '_') +} + +fn preview_json_value(value: &Value) -> String { + let serialized = serde_json::to_string(value) + .unwrap_or_else(|_| "\"\"".to_string()); + ellipsis(&serialized, MAX_VALUE_PREVIEW_CHARS) +} + +fn ellipsis(input: &str, max_chars: usize) -> String { + let mut result = String::new(); + + for (count, ch) in input.chars().enumerate() { + if count == max_chars { + result.push_str("..."); + return result; + } + result.push(ch); + } + + result +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use serde_json::json; + + use super::*; + use crate::scenario_runner::RunnerError; + + #[test] + fn parse_uri_reports_structured_error_variant() { + let result = parse_uri("://not-a-uri", "hover"); + let (context, uri, source) = assert_matches!( + result, + Err(RunnerError::Uri(error_box)) => { + let UriError::Parse { + context, + uri, + source, + } = *error_box; + (context, uri, source) + } + ); + assert_eq!(context, "hover"); + assert_eq!(uri, "://not-a-uri"); + assert!(!source.to_string().is_empty()); + } + + #[test] + fn json_mismatch_report_is_structural_and_concise() { + let actual = json!({ + "items": 
[ + { "label": "a", "kind": 1 }, + { "label": "b", "kind": 2 } + ], + "isIncomplete": false + }); + let expected = json!({ + "items": [ + { "label": "a", "kind": 1 }, + { "label": "c", "kind": 2 } + ], + "isIncomplete": true + }); + + let report = json_mismatch_report(&actual, &expected); + assert_eq!( + report.differences(), + [ + JsonDifference::ValueMismatch { + path: "$.isIncomplete".to_string(), + actual: json!(false), + expected: json!(true), + }, + JsonDifference::ValueMismatch { + path: "$.items[1].label".to_string(), + actual: json!("b"), + expected: json!("c"), + }, + ] + ); + assert!(!report.truncated()); + + let rendered = report.to_string(); + assert_eq!( + rendered, + concat!( + "differences:\n", + " - $.isIncomplete: actual false != expected true\n", + " - $.items[1].label: actual \"b\" != expected \"c\"\n" + ) + ); + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/mod.rs new file mode 100644 index 00000000..52f63851 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/mod.rs @@ -0,0 +1,15 @@ +//! In-memory runner for `Scenario` timelines. 
+ +mod document_steps; +mod errors; +mod expectation_steps; +mod helpers; +mod request_steps; +mod runner; +mod transport; + +pub use self::{errors::RunnerError, runner::run_scenario}; +use self::{ + errors::RunnerResult, + runner::{ScenarioRunner, REQUEST_TIMEOUT}, +}; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs new file mode 100644 index 00000000..833251ce --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs @@ -0,0 +1,402 @@ +use std::collections::HashMap; + +use lsp_types::{ + request::{ + CodeActionRequest, CodeLensRequest, Completion, DocumentSymbolRequest, ExecuteCommand, + Formatting, GotoDeclaration, GotoDefinition, GotoTypeDefinition, HoverRequest, + InlayHintRequest, PrepareRenameRequest, RangeFormatting, References, Rename, Request as _, + SemanticTokensFullRequest, SemanticTokensRangeRequest, SignatureHelpRequest, + WorkspaceSymbolRequest, + }, + CodeActionContext, CodeActionParams, CodeLensParams, DocumentFormattingParams, + DocumentRangeFormattingParams, DocumentSymbolParams, ExecuteCommandParams, FormattingOptions, + GotoDefinitionParams, HoverParams, InlayHintParams, PartialResultParams, ReferenceContext, + ReferenceParams, RenameParams, SemanticTokensParams, SemanticTokensRangeParams, + SignatureHelpParams, TextDocumentIdentifier, TextDocumentPositionParams, + WorkDoneProgressParams, WorkspaceSymbolParams, +}; +use thiserror::Error; + +use super::{ + helpers::parse_uri, + transport::{RpcError, SerdeError}, + RunnerResult, ScenarioRunner, +}; +use crate::scenario::{ + RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestCustomStep, + RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, + RequestExecuteCodeLensStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, + RequestInlayHintsStep, RequestPrepareRenameStep, RequestRangeFormattingStep, + 
RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, + RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, + RequestWorkspaceSymbolStep, +}; + +#[derive(Debug, Error)] +pub enum RequestStepError { + #[error("codeLens request id {request_id} returned null result")] + MissingResult { request_id: i32 }, + #[error("codeLens request id {request_id} returned no lens at index {index} (len={len})")] + IndexOutOfBounds { + request_id: i32, + index: usize, + len: usize, + }, + #[error("codeLens request id {request_id} lens index {index} has no command")] + MissingCommand { request_id: i32, index: usize }, +} + +fn text_document_position_params( + uri: lsp_types::Uri, + position: lsp_types::Position, +) -> TextDocumentPositionParams { + TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri }, + position, + } +} + +fn goto_definition_params( + uri: lsp_types::Uri, + position: lsp_types::Position, +) -> GotoDefinitionParams { + GotoDefinitionParams { + text_document_position_params: text_document_position_params(uri, position), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + } +} + +impl ScenarioRunner { + pub(super) fn step_request_code_action( + &self, + step: &RequestCodeActionStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "codeAction")?; + let params = CodeActionParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + context: CodeActionContext { + diagnostics: step.diagnostics.clone(), + only: step.only.clone(), + trigger_kind: None, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params(step.id, CodeActionRequest::METHOD, params, "codeAction") + } + + pub(super) fn step_request_references(&self, step: &RequestReferencesStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, 
"references")?; + let params = ReferenceParams { + text_document_position: text_document_position_params(uri, step.position), + context: ReferenceContext { + include_declaration: step.include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params(step.id, References::METHOD, params, "references") + } + + pub(super) fn step_request_definition(&self, step: &RequestDefinitionStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "definition")?; + let params = goto_definition_params(uri, step.position); + self.send_request_with_params(step.id, GotoDefinition::METHOD, params, "definition") + } + + pub(super) fn step_request_declaration( + &self, + step: &RequestDeclarationStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "declaration")?; + let params = goto_definition_params(uri, step.position); + self.send_request_with_params(step.id, GotoDeclaration::METHOD, params, "declaration") + } + + pub(super) fn step_request_type_definition( + &self, + step: &RequestTypeDefinitionStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "typeDefinition")?; + let params = goto_definition_params(uri, step.position); + self.send_request_with_params( + step.id, + GotoTypeDefinition::METHOD, + params, + "typeDefinition", + ) + } + + pub(super) fn step_request_prepare_rename( + &self, + step: &RequestPrepareRenameStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "prepareRename")?; + let params = text_document_position_params(uri, step.position); + self.send_request_with_params( + step.id, + PrepareRenameRequest::METHOD, + params, + "prepareRename", + ) + } + + pub(super) fn step_request_rename(&self, step: &RequestRenameStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "rename")?; + let params = RenameParams { + text_document_position: text_document_position_params(uri, step.position), + new_name: 
step.new_name.clone(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, Rename::METHOD, params, "rename") + } + + pub(super) fn step_request_hover(&self, step: &RequestHoverStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "hover")?; + let params = HoverParams { + text_document_position_params: text_document_position_params(uri, step.position), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, HoverRequest::METHOD, params, "hover") + } + + pub(super) fn step_request_signature_help( + &self, + step: &RequestSignatureHelpStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "signatureHelp")?; + let params = SignatureHelpParams { + text_document_position_params: text_document_position_params(uri, step.position), + work_done_progress_params: WorkDoneProgressParams::default(), + context: None, + }; + self.send_request_with_params( + step.id, + SignatureHelpRequest::METHOD, + params, + "signatureHelp", + ) + } + + pub(super) fn step_request_completion(&self, step: &RequestCompletionStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "completion")?; + let params = lsp_types::CompletionParams { + text_document_position: text_document_position_params(uri, step.position), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + }; + self.send_request_with_params(step.id, Completion::METHOD, params, "completion") + } + + pub(super) fn step_request_formatting(&self, step: &RequestFormattingStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "formatting")?; + let params = DocumentFormattingParams { + text_document: TextDocumentIdentifier { uri }, + options: FormattingOptions { + tab_size: step.tab_size, + insert_spaces: step.insert_spaces, + properties: HashMap::new(), + trim_trailing_whitespace: step.trim_trailing_whitespace, + 
insert_final_newline: step.insert_final_newline, + trim_final_newlines: step.trim_final_newlines, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, Formatting::METHOD, params, "formatting") + } + + pub(super) fn step_request_range_formatting( + &self, + step: &RequestRangeFormattingStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "rangeFormatting")?; + let params = DocumentRangeFormattingParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + options: FormattingOptions { + tab_size: step.tab_size, + insert_spaces: step.insert_spaces, + properties: HashMap::new(), + trim_trailing_whitespace: step.trim_trailing_whitespace, + insert_final_newline: step.insert_final_newline, + trim_final_newlines: step.trim_final_newlines, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, RangeFormatting::METHOD, params, "rangeFormatting") + } + + pub(super) fn step_request_semantic_tokens_full( + &self, + step: &RequestSemanticTokensFullStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "semanticTokens/full")?; + let params = SemanticTokensParams { + text_document: TextDocumentIdentifier { uri }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params( + step.id, + SemanticTokensFullRequest::METHOD, + params, + "semanticTokens/full", + ) + } + + pub(super) fn step_request_semantic_tokens_range( + &self, + step: &RequestSemanticTokensRangeStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "semanticTokens/range")?; + let params = SemanticTokensRangeParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params( + 
step.id, + SemanticTokensRangeRequest::METHOD, + params, + "semanticTokens/range", + ) + } + + pub(super) fn step_request_inlay_hints( + &self, + step: &RequestInlayHintsStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "inlayHints")?; + let params = InlayHintParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, InlayHintRequest::METHOD, params, "inlayHints") + } + + pub(super) fn step_request_document_symbol( + &self, + step: &RequestDocumentSymbolStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "documentSymbol")?; + let params = DocumentSymbolParams { + text_document: TextDocumentIdentifier { uri }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params( + step.id, + DocumentSymbolRequest::METHOD, + params, + "documentSymbol", + ) + } + + pub(super) fn step_request_workspace_symbol( + &self, + step: &RequestWorkspaceSymbolStep, + ) -> RunnerResult<()> { + let params = WorkspaceSymbolParams { + query: step.query.clone(), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params( + step.id, + WorkspaceSymbolRequest::METHOD, + params, + "workspaceSymbol", + ) + } + + pub(super) fn step_request_code_lens(&self, step: &RequestCodeLensStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "codeLens")?; + let params = CodeLensParams { + text_document: TextDocumentIdentifier { uri }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params(step.id, CodeLensRequest::METHOD, params, "codeLens") + } + + pub(super) fn step_request_execute_command( + &self, + step: &RequestExecuteCommandStep, + ) -> 
RunnerResult<()> { + let params = ExecuteCommandParams { + command: step.command.clone(), + arguments: step.arguments.clone(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, ExecuteCommand::METHOD, params, "executeCommand") + } + + pub(super) fn step_request_execute_code_lens( + &mut self, + step: &RequestExecuteCodeLensStep, + ) -> RunnerResult<()> { + let response = self.wait_response(step.code_lens_request_id, super::REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: "codeLens", + id: step.code_lens_request_id, + error, + } + .into()); + } + + let code_lenses: Option<Vec<CodeLens>> = response.result.clone().map_or_else( + || Ok(None), + |value| { + serde_json::from_value(value).map(Some).map_err(|source| { + SerdeError::DeserializeResponseResult { + method: "codeLens", + id: step.code_lens_request_id, + source, + } + }) + }, + )?; + + // Keep response available for an expectCodeLens step.
+ self.pending_responses.push(response); + + let Some(code_lenses) = code_lenses else { + return Err(RequestStepError::MissingResult { + request_id: step.code_lens_request_id, + } + .into()); + }; + let Some(code_lens) = code_lenses.get(step.index) else { + return Err(RequestStepError::IndexOutOfBounds { + request_id: step.code_lens_request_id, + index: step.index, + len: code_lenses.len(), + } + .into()); + }; + let Some(command) = &code_lens.command else { + return Err(RequestStepError::MissingCommand { + request_id: step.code_lens_request_id, + index: step.index, + } + .into()); + }; + + let params = ExecuteCommandParams { + command: command.command.clone(), + arguments: command.arguments.clone().unwrap_or_default(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, ExecuteCommand::METHOD, params, "executeCodeLens") + } + + pub(super) fn step_request_custom(&self, step: &RequestCustomStep) -> RunnerResult<()> { + self.send_request_with_params(step.id, &step.method, &step.params, "customRequest") + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs new file mode 100644 index 00000000..f1b66946 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -0,0 +1,523 @@ +use std::{ + collections::{HashMap, VecDeque}, + thread, + time::{Duration, Instant}, +}; + +use lsp_server::{Connection, Response}; +use lsp_types::{ + request::{Initialize, Request as _, Shutdown}, + ClientCapabilities, InitializeParams, InlayHintWorkspaceClientCapabilities, + WorkspaceClientCapabilities, +}; + +use super::{transport::RpcError, RunnerError, RunnerResult}; +use crate::scenario::{Scenario, ScenarioStep}; + +const INITIALIZE_REQUEST_ID: i32 = 1; +const SHUTDOWN_REQUEST_ID: i32 = 9_999; +pub(super) const REQUEST_TIMEOUT: Duration = Duration::from_secs(5); + +/// Run a full timeline scenario against an in-memory LSP 
server. +/// +/// `start_server` receives the server-side `Connection` and should run the +/// server event loop until shutdown/exit. +pub fn run_scenario<S>(scenario: &Scenario, start_server: S) -> RunnerResult<()> +where + S: FnOnce(Connection) + Send + 'static, +{ + run_scenario_typed(scenario, start_server) +} + +fn run_scenario_typed<S>(scenario: &Scenario, start_server: S) -> RunnerResult<()> +where + S: FnOnce(Connection) + Send + 'static, +{ + let mut runner = ScenarioRunner::start(start_server); + runner.initialize()?; + for step in &scenario.steps { + runner.run_step(step)?; + } + runner.shutdown() +} + +pub(super) struct ScenarioRunner { + pub(super) conn: Connection, + pub(super) server_thread: thread::JoinHandle<()>, + pub(super) pending_responses: Vec<Response>, + pub(super) pending_diagnostics: HashMap<String, VecDeque<lsp_types::PublishDiagnosticsParams>>, + pub(super) last_diagnostic_at: Option<Instant>, +} + +impl ScenarioRunner { + pub(super) fn start<S>(start_server: S) -> Self + where + S: FnOnce(Connection) + Send + 'static, + { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = thread::spawn(move || start_server(server_conn)); + Self { + conn: client_conn, + server_thread, + pending_responses: Vec::new(), + pending_diagnostics: HashMap::new(), + last_diagnostic_at: None, + } + } + + fn initialize(&mut self) -> RunnerResult<()> { + let params = InitializeParams { + capabilities: ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + inlay_hint: Some(InlayHintWorkspaceClientCapabilities { + refresh_support: Some(true), + }), + ..WorkspaceClientCapabilities::default() + }), + ..ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + self.send_request_with_params( + INITIALIZE_REQUEST_ID, + Initialize::METHOD, + params, + "initialize", + )?; + let response = self.wait_response(INITIALIZE_REQUEST_ID, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: Initialize::METHOD, + id: INITIALIZE_REQUEST_ID, +
error, + } + .into()); + } + self.send_notification_with_params("initialized", serde_json::json!({}), "initialized")?; + self.drain_background_messages() + } + + fn shutdown(self) -> RunnerResult<()> { + let mut runner = self; + runner.send_request_with_params( + SHUTDOWN_REQUEST_ID, + Shutdown::METHOD, + serde_json::Value::Null, + "shutdown", + )?; + let response = runner.wait_response(SHUTDOWN_REQUEST_ID, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: Shutdown::METHOD, + id: SHUTDOWN_REQUEST_ID, + error, + } + .into()); + } + runner.send_notification_with_params("exit", serde_json::Value::Null, "exit")?; + runner + .server_thread + .join() + .map_err(|_| RunnerError::ServerThreadPanicked) + } + + fn run_step(&mut self, step: &ScenarioStep) -> RunnerResult<()> { + let result = match step { + ScenarioStep::Open(open) => self.step_open(open), + ScenarioStep::ChangeFull(change) => self.step_change_full(change), + ScenarioStep::ChangeIncremental(change) => self.step_change_incremental(change), + ScenarioStep::Save(save) => self.step_save(save), + ScenarioStep::Close(close) => self.step_close(close), + ScenarioStep::Config(config) => self.step_config(config), + ScenarioStep::WriteFile(write_file) => Self::step_write_file(write_file), + ScenarioStep::DeleteFile(delete_file) => Self::step_delete_file(delete_file), + ScenarioStep::NotifyWatchedFiles(watched_files) => { + self.step_notify_watched_files(watched_files) + } + ScenarioStep::RequestCodeAction(request) => self.step_request_code_action(request), + ScenarioStep::ExpectCodeAction(expectation) => { + self.step_expect_code_action(expectation) + } + ScenarioStep::RequestReferences(request) => self.step_request_references(request), + ScenarioStep::ExpectReferences(expectation) => self.step_expect_references(expectation), + ScenarioStep::RequestDefinition(request) => self.step_request_definition(request), + ScenarioStep::ExpectDefinition(expectation) 
=> self.step_expect_definition(expectation), + ScenarioStep::RequestDeclaration(request) => self.step_request_declaration(request), + ScenarioStep::ExpectDeclaration(expectation) => { + self.step_expect_declaration(expectation) + } + ScenarioStep::RequestTypeDefinition(request) => { + self.step_request_type_definition(request) + } + ScenarioStep::ExpectTypeDefinition(expectation) => { + self.step_expect_type_definition(expectation) + } + ScenarioStep::RequestPrepareRename(request) => { + self.step_request_prepare_rename(request) + } + ScenarioStep::ExpectPrepareRename(expectation) => { + self.step_expect_prepare_rename(expectation) + } + ScenarioStep::RequestRename(request) => self.step_request_rename(request), + ScenarioStep::ExpectRename(expectation) => self.step_expect_rename(expectation), + ScenarioStep::RequestHover(request) => self.step_request_hover(request), + ScenarioStep::ExpectHover(expectation) => self.step_expect_hover(expectation), + ScenarioStep::RequestSignatureHelp(request) => { + self.step_request_signature_help(request) + } + ScenarioStep::ExpectSignatureHelp(expectation) => { + self.step_expect_signature_help(expectation) + } + ScenarioStep::RequestCompletion(request) => self.step_request_completion(request), + ScenarioStep::ExpectCompletion(expectation) => self.step_expect_completion(expectation), + ScenarioStep::RequestFormatting(request) => self.step_request_formatting(request), + ScenarioStep::ExpectFormatting(expectation) => self.step_expect_formatting(expectation), + ScenarioStep::RequestRangeFormatting(request) => { + self.step_request_range_formatting(request) + } + ScenarioStep::ExpectRangeFormatting(expectation) => { + self.step_expect_range_formatting(expectation) + } + ScenarioStep::RequestSemanticTokensFull(request) => { + self.step_request_semantic_tokens_full(request) + } + ScenarioStep::ExpectSemanticTokensFull(expectation) => { + self.step_expect_semantic_tokens_full(expectation) + } + 
ScenarioStep::RequestSemanticTokensRange(request) => { + self.step_request_semantic_tokens_range(request) + } + ScenarioStep::ExpectSemanticTokensRange(expectation) => { + self.step_expect_semantic_tokens_range(expectation) + } + ScenarioStep::RequestInlayHints(request) => self.step_request_inlay_hints(request), + ScenarioStep::ExpectInlayHints(expectation) => { + self.step_expect_inlay_hints(expectation) + } + ScenarioStep::RequestDocumentSymbol(request) => { + self.step_request_document_symbol(request) + } + ScenarioStep::ExpectDocumentSymbol(expectation) => { + self.step_expect_document_symbol(expectation) + } + ScenarioStep::RequestWorkspaceSymbol(request) => { + self.step_request_workspace_symbol(request) + } + ScenarioStep::ExpectWorkspaceSymbol(expectation) => { + self.step_expect_workspace_symbol(expectation) + } + ScenarioStep::RequestCodeLens(request) => self.step_request_code_lens(request), + ScenarioStep::ExpectCodeLens(expectation) => self.step_expect_code_lens(expectation), + ScenarioStep::RequestExecuteCommand(request) => { + self.step_request_execute_command(request) + } + ScenarioStep::ExpectExecuteCommand(expectation) => { + self.step_expect_execute_command(expectation) + } + ScenarioStep::RequestExecuteCodeLens(request) => { + self.step_request_execute_code_lens(request) + } + ScenarioStep::ExpectExecuteCodeLens(expectation) => { + self.step_expect_execute_code_lens(expectation) + } + ScenarioStep::RequestCustom(request) => self.step_request_custom(request), + ScenarioStep::ExpectCustom(expectation) => self.step_expect_custom(expectation), + ScenarioStep::ExpectDiagnostics(expectation) => { + self.step_expect_diagnostics(expectation) + } + ScenarioStep::DiagnosticsSettled(settled) => self.step_diagnostics_settled(*settled), + }; + result?; + self.drain_background_messages() + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use lsp_server::{Message, Notification, Response}; + use lsp_types::{ + 
notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, + request::{HoverRequest, InlayHintRefreshRequest, Request as _}, + Diagnostic, DiagnosticSeverity, Hover, HoverContents, MarkedString, Position, + PublishDiagnosticsParams, Range, + }; + + use super::{super::transport::RpcError, run_scenario}; + use crate::{ + scenario::{ + DiagnosticsSettledStep, ExpectDiagnosticsStep, ExpectHoverStep, + HoverSectionExpectation, OpenStep, RequestHoverStep, Scenario, ScenarioStep, + }, + scenario_runner::{helpers::JsonDifference, RunnerError}, + }; + + fn test_diagnostic() -> Diagnostic { + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 1, + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: None, + code_description: None, + source: Some("scenario-test".to_string()), + message: "test diagnostic".to_string(), + related_information: None, + tags: None, + data: None, + } + } + + fn test_hover() -> Hover { + Hover { + contents: HoverContents::Array(vec![MarkedString::String("`number`".to_string())]), + range: None, + } + } + + #[test] + fn run_scenario_handles_requests_and_diagnostics() -> Result<(), super::RunnerError> { + let uri = "file:///workspace/main.jsonnet".to_string(); + let expected_diagnostic = test_diagnostic(); + let expected_hover = test_hover(); + let scenario = Scenario::new(vec![ + ScenarioStep::Open(OpenStep { + uri: uri.clone(), + text: "42\n".to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::RequestHover(RequestHoverStep { + id: 7, + uri: uri.clone(), + position: Position { + line: 0, + character: 0, + }, + }), + ScenarioStep::ExpectHover(ExpectHoverStep { + id: 7, + result: Some(vec![HoverSectionExpectation::Type { + ty: "number".to_string(), + }]), + }), + ScenarioStep::ExpectDiagnostics(ExpectDiagnosticsStep { + uri, + diagnostics: vec![expected_diagnostic.clone()], + }), + 
ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { + timeout_ms: 300, + idle_ms: 30, + }), + ]); + + run_scenario(&scenario, move |connection| loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + + match message { + Message::Request(request) => { + let response = match request.method.as_str() { + lsp_types::request::Initialize::METHOD => { + Response::new_ok(request.id, serde_json::json!({})) + } + HoverRequest::METHOD => { + let Ok(result) = serde_json::to_value(&expected_hover) else { + break; + }; + Response::new_ok(request.id, result) + } + lsp_types::request::Shutdown::METHOD => { + Response::new_ok(request.id, serde_json::Value::Null) + } + _ => Response::new_ok(request.id, serde_json::Value::Null), + }; + if connection.sender.send(Message::Response(response)).is_err() { + break; + } + } + Message::Notification(notification) => { + if notification.method == DidOpenTextDocument::METHOD { + let Ok(params) = serde_json::from_value::< + lsp_types::DidOpenTextDocumentParams, + >(notification.params) else { + break; + }; + let publish = PublishDiagnosticsParams { + uri: params.text_document.uri, + version: Some(params.text_document.version), + diagnostics: vec![expected_diagnostic.clone()], + }; + let Ok(payload) = serde_json::to_value(publish) else { + break; + }; + let diag = + Notification::new(PublishDiagnostics::METHOD.to_string(), payload); + if connection.sender.send(Message::Notification(diag)).is_err() { + break; + } + } else if notification.method == "exit" { + break; + } + } + Message::Response(_) => {} + } + }) + } + + #[test] + fn run_scenario_auto_handles_server_initiated_requests() { + let scenario = Scenario::new(vec![]); + let refresh_id = lsp_server::RequestId::from(42_i32); + + let result = run_scenario(&scenario, move |connection| { + let mut saw_refresh_response = false; + + loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + + match message { + Message::Request(request) => { + let 
response = match request.method.as_str() { + lsp_types::request::Initialize::METHOD => { + Response::new_ok(request.id, serde_json::json!({})) + } + lsp_types::request::Shutdown::METHOD => { + Response::new_ok(request.id, serde_json::Value::Null) + } + _ => Response::new_ok(request.id, serde_json::Value::Null), + }; + if connection.sender.send(Message::Response(response)).is_err() { + break; + } + } + Message::Notification(notification) => { + if notification.method == "initialized" { + let refresh = lsp_server::Request::new( + refresh_id.clone(), + InlayHintRefreshRequest::METHOD.to_string(), + serde_json::Value::Null, + ); + if connection.sender.send(Message::Request(refresh)).is_err() { + break; + } + } else if notification.method == "exit" { + break; + } + } + Message::Response(response) => { + if response.id == refresh_id { + assert_matches!( + response, + Response { + id, + result: Some(serde_json::Value::Null), + error: None, + } if id == refresh_id + ); + saw_refresh_response = true; + } + } + } + } + + assert!( + saw_refresh_response, + "client should acknowledge server-initiated refresh requests", + ); + }); + assert!( + result.is_ok(), + "scenario should finish successfully: {result:?}" + ); + } + + #[test] + fn run_scenario_reports_mismatched_expectation() { + let scenario = Scenario::new(vec![ + ScenarioStep::RequestHover(RequestHoverStep { + id: 5, + uri: "file:///workspace/main.jsonnet".to_string(), + position: Position { + line: 0, + character: 0, + }, + }), + ScenarioStep::ExpectHover(ExpectHoverStep { + id: 5, + result: None, + }), + ]); + + let result = run_scenario(&scenario, move |connection| loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + match message { + Message::Request(request) => { + let response = match request.method.as_str() { + lsp_types::request::Initialize::METHOD => { + Response::new_ok(request.id, serde_json::json!({})) + } + HoverRequest::METHOD => { + let Ok(result) = 
serde_json::to_value(test_hover()) else { + break; + }; + Response::new_ok(request.id, result) + } + lsp_types::request::Shutdown::METHOD => { + Response::new_ok(request.id, serde_json::Value::Null) + } + _ => Response::new_ok(request.id, serde_json::Value::Null), + }; + if connection.sender.send(Message::Response(response)).is_err() { + break; + } + } + Message::Notification(notification) => { + if notification.method == "exit" { + break; + } + } + Message::Response(_) => {} + } + }); + + let error = result.expect_err("scenario should report mismatch"); + let error_box = assert_matches!( + error, + RunnerError::Rpc(error_box) => error_box + ); + let details = assert_matches!( + *error_box, + RpcError::ResponseMismatch { + method: "hover", + id: 5, + details, + } => details + ); + assert_eq!( + details.differences(), + [JsonDifference::ValueMismatch { + path: "$".to_string(), + actual: serde_json::json!({ + "contents": [ + "`number`" + ] + }), + expected: serde_json::Value::Null, + }] + ); + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs new file mode 100644 index 00000000..664fb810 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs @@ -0,0 +1,265 @@ +use std::time::{Duration, Instant}; + +use crossbeam_channel::{RecvTimeoutError, TryRecvError}; +use lsp_server::{Message, Notification, Request, Response, ResponseError}; +use lsp_types::notification::{Notification as _, PublishDiagnostics}; +use serde::{de::DeserializeOwned, Serialize}; +use thiserror::Error; + +use super::{helpers::JsonMismatchReport, RunnerResult, ScenarioRunner, REQUEST_TIMEOUT}; + +#[derive(Debug, Error)] +pub enum SerdeError { + #[error("serialize {context}: {source}")] + SerializeParams { + context: &'static str, + #[source] + source: serde_json::Error, + }, + #[error("deserialize {method} response result for id {id}: {source}")] + DeserializeResponseResult { + 
method: &'static str, + id: i32, + #[source] + source: serde_json::Error, + }, + #[error("deserialize publishDiagnostics params: {source}")] + DeserializePublishDiagnostics { + #[source] + source: serde_json::Error, + }, + #[error("serialize {context}: {source}")] + SerializeValue { + context: &'static str, + #[source] + source: serde_json::Error, + }, +} + +#[derive(Debug, Error)] +pub enum TransportError { + #[error("timed out waiting for response id {id}")] + ResponseTimeout { id: i32 }, + #[error("connection closed while waiting for response")] + ResponseDisconnected, + #[error("timed out waiting for diagnostics for uri {uri}")] + DiagnosticsTimeout { uri: String }, + #[error("connection closed while waiting for diagnostics")] + DiagnosticsDisconnected, + #[error("diagnostics did not settle within {timeout:?}")] + DiagnosticsDidNotSettle { timeout: Duration }, + #[error("connection closed while waiting for diagnostics to settle")] + DiagnosticsSettledDisconnected, + #[error("send notification failed: {source}")] + SendNotification { + #[source] + source: Box<crossbeam_channel::SendError<Message>>, + }, + #[error("send request failed: {source}")] + SendRequest { + #[source] + source: Box<crossbeam_channel::SendError<Message>>, + }, + #[error("send response failed: {source}")] + SendResponse { + #[source] + source: Box<crossbeam_channel::SendError<Message>>, + }, +} + +#[derive(Debug, Error)] +pub enum RpcError { + #[error("response {method} id {id} returned error: {error:?}")] + ResponseReturnedError { + method: &'static str, + id: i32, + error: ResponseError, + }, + #[error("{method} response mismatch for id {id}\n{details}")] + ResponseMismatch { + method: &'static str, + id: i32, + details: JsonMismatchReport, + }, +} + +impl ScenarioRunner { + pub(super) fn drain_background_messages(&mut self) -> RunnerResult<()> { + loop { + match self.conn.receiver.try_recv() { + Ok(message) => self.capture_background_message(message)?, + Err(TryRecvError::Empty | TryRecvError::Disconnected) => return Ok(()), + } + } + } + + pub(super) fn send_notification_with_params<P: Serialize>( + &self, +
method: &str, + params: P, + context: &'static str, + ) -> RunnerResult<()> { + let payload = serde_json::to_value(params) + .map_err(|source| SerdeError::SerializeParams { context, source })?; + self.send_notification(Notification::new(method.to_owned(), payload)) + } + + pub(super) fn send_request_with_params<P: Serialize>( + &self, + id: i32, + method: &str, + params: P, + context: &'static str, + ) -> RunnerResult<()> { + let payload = serde_json::to_value(params) + .map_err(|source| SerdeError::SerializeParams { context, source })?; + self.send_request(Request::new(id.into(), method.to_owned(), payload)) + } + + pub(super) fn response_result<T>( + &mut self, + method: &'static str, + id: i32, + ) -> RunnerResult<Option<T>> + where + T: DeserializeOwned, + { + let response = self.wait_response(id, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { method, id, error }.into()); + } + response.result.map_or_else( + || Ok(None), + |value| { + serde_json::from_value(value) + .map_err(|source| SerdeError::DeserializeResponseResult { method, id, source }) + .map_err(Into::into) + }, + ) + } + + pub(super) fn wait_response(&mut self, id: i32, timeout: Duration) -> RunnerResult<Response> { + if let Some(index) = self + .pending_responses + .iter() + .position(|response| response.id == id.into()) + { + return Ok(self.pending_responses.swap_remove(index)); + } + + let deadline = Instant::now() + timeout; + loop { + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(TransportError::ResponseTimeout { id }.into()); + } + + match self.conn.receiver.recv_timeout(remaining) { + Ok(Message::Response(response)) if response.id == id.into() => return Ok(response), + Ok(message) => self.capture_background_message(message)?, + Err(RecvTimeoutError::Timeout) => { + return Err(TransportError::ResponseTimeout { id }.into()); + } + Err(RecvTimeoutError::Disconnected) => { + return
Err(TransportError::ResponseDisconnected.into()); + } + } + } + } + + pub(super) fn wait_diagnostics_for_uri( + &mut self, + uri: &str, + timeout: Duration, + ) -> RunnerResult<lsp_types::PublishDiagnosticsParams> { + if let Some(queue) = self.pending_diagnostics.get_mut(uri) { + if let Some(params) = queue.pop_front() { + return Ok(params); + } + } + + let deadline = Instant::now() + timeout; + loop { + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(TransportError::DiagnosticsTimeout { + uri: uri.to_owned(), + } + .into()); + } + match self.conn.receiver.recv_timeout(remaining) { + Ok(message) => { + self.capture_background_message(message)?; + if let Some(queue) = self.pending_diagnostics.get_mut(uri) { + if let Some(params) = queue.pop_front() { + return Ok(params); + } + } + } + Err(RecvTimeoutError::Timeout) => { + return Err(TransportError::DiagnosticsTimeout { + uri: uri.to_owned(), + } + .into()); + } + Err(RecvTimeoutError::Disconnected) => { + return Err(TransportError::DiagnosticsDisconnected.into()); + } + } + } + } + + pub(super) fn capture_background_message(&mut self, message: Message) -> RunnerResult<()> { + match message { + Message::Response(response) => { + self.pending_responses.push(response); + Ok(()) + } + Message::Notification(notification) + if notification.method == PublishDiagnostics::METHOD => + { + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notification.params) + .map_err(|source| SerdeError::DeserializePublishDiagnostics { source })?; + let key = params.uri.as_str().to_owned(); + self.pending_diagnostics + .entry(key) + .or_default() + .push_back(params); + self.last_diagnostic_at = Some(Instant::now()); + Ok(()) + } + Message::Notification(_) => Ok(()), + Message::Request(request) => { + let response = Response::new_ok(request.id, serde_json::Value::Null); + self.conn + .sender + .send(Message::Response(response)) + .map_err(|source| TransportError::SendResponse { + source:
Box::new(source), + }) + .map_err(Into::into) + } + } + } + + fn send_notification(&self, notification: Notification) -> RunnerResult<()> { + self.conn + .sender + .send(Message::Notification(notification)) + .map_err(|source| TransportError::SendNotification { + source: Box::new(source), + }) + .map_err(Into::into) + } + + fn send_request(&self, request: Request) -> RunnerResult<()> { + self.conn + .sender + .send(Message::Request(request)) + .map_err(|source| TransportError::SendRequest { + source: Box::new(source), + }) + .map_err(Into::into) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs new file mode 100644 index 00000000..76dd9f49 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -0,0 +1,1471 @@ +//! Semantic compilation from YAML script AST to executable scenario steps. +//! +//! This module translates deserialized script records into +//! [`crate::scenario::ScenarioStep`] values. During compilation it: +//! - resolves relative paths and file URIs from the scenario base directory +//! - parses and tracks inline markers through file mutations +//! - allocates/matches request IDs and aliases +//! - translates shorthand expected payloads into concrete LSP structures +//! - enforces cross-step invariants (duplicate aliases, unknown markers, etc.) 
+ +use std::{ + collections::{BTreeMap, HashMap}, + path::Path, +}; + +use lsp_types::CodeActionKind; +use serde::Deserialize; +use thiserror::Error; + +use super::{ + inputs::{ + resolve_file_uri_shorthand_json, CodeActionOrCommandInput, CodeLensInput, DiagnosticInput, + ExpectCompletionScriptStep, ExpectDocumentSymbolScriptStep, ExpectExecuteCommandScriptStep, + ExpectFormattingScriptStep, ExpectHoverScriptStep, ExpectPrepareRenameScriptStep, + ExpectSignatureHelpScriptStep, GotoDefinitionResponseInput, InlayHintInput, InputError, + LocationInput, SemanticTokensResultInput, WorkspaceEditInput, WorkspaceSymbolResponseInput, + }, + markers::{MarkerError, MarkerStore, PositionSpec, RangeInput}, + paths::{file_path, file_uri}, + registry::{RequestKind, RequestRegistry, RequestRegistryError}, +}; +use crate::scenario::{ + ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DeleteFileStep, + DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, + ExpectCustomStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, + ExpectDocumentSymbolStep, ExpectExecuteCodeLensStep, ExpectExecuteCommandStep, + ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, + ExpectRangeFormattingStep, ExpectReferencesStep, ExpectRenameStep, + ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, + ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, HoverSectionExpectation, + NotifyWatchedFilesStep, OpenStep, RequestCodeActionStep, RequestCodeLensStep, + RequestCompletionStep, RequestCustomStep, RequestDeclarationStep, RequestDefinitionStep, + RequestDocumentSymbolStep, RequestExecuteCodeLensStep, RequestExecuteCommandStep, + RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, RequestPrepareRenameStep, + RequestRangeFormattingStep, RequestReferencesStep, RequestRenameStep, + RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, 
RequestSignatureHelpStep, + RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, SaveStep, Scenario, + ScenarioFileChangeType, ScenarioStep, WatchedFileChangeStep, WriteFileStep, +}; + +/// Parsed YAML root object for one scenario script file. +/// +/// Step order is significant and preserved exactly during compilation. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ScenarioScript { + steps: Vec, +} + +/// Error returned when deserialized script data is semantically invalid. +/// +/// This wraps human-readable validation failures discovered during compilation +/// (for example unknown marker names, alias mismatches, or missing request +/// context for shorthand expectations). +#[derive(Debug, Error)] +pub enum CompileScenarioError { + #[error(transparent)] + Registry(#[from] RequestRegistryError), + #[error(transparent)] + Markers(#[from] MarkerError), + #[error(transparent)] + Inputs(#[from] InputError), + #[error("create: `files` must include at least one file")] + CreateRequiresFiles, + #[error( + "create: `open` references unknown file '{relative_path}', expected one of: {expected:?}" + )] + CreateOpenReferencesUnknownFile { + relative_path: String, + expected: Vec, + }, + #[error("create: missing parsed text for '{relative_path}'")] + CreateMissingParsedText { relative_path: String }, + #[error("{step}: missing request file context for request id {request_id}")] + MissingRequestFileContext { step: &'static str, request_id: i32 }, +} + +impl ScenarioScript { + /// Compile parsed script steps into an executable scenario timeline. + /// + /// Compilation is stateful: it threads marker state and request alias state + /// through the step sequence so later steps can reference earlier outputs. + /// + /// Most script steps translate to exactly one executable step, with two notable + /// expansions: + /// - `create` expands to one or more `writeFile` steps plus `open` steps. 
+ /// - `expectTypes` expands to repeated `requestHover` + `expectHover` pairs. + pub(super) fn compile(self, base_dir: &Path) -> Result { + let mut registry = RequestRegistry::new(); + let mut marker_store = MarkerStore::new(); + // Some expect-step shorthands need the originating request file in order to + // resolve marker-backed positions/ranges in their expected payloads. + let mut code_action_request_files = HashMap::new(); + let mut code_lens_request_files = HashMap::new(); + let mut inlay_hint_request_files = HashMap::new(); + let mut completion_request_files = HashMap::new(); + let mut prepare_rename_request_files = HashMap::new(); + let mut document_symbol_request_files = HashMap::new(); + let mut semantic_tokens_full_request_files = HashMap::new(); + let mut semantic_tokens_range_request_files = HashMap::new(); + let mut steps = Vec::with_capacity(self.steps.len()); + + // Compile in source order so aliases, marker state, and pending requests + // evolve exactly as authored in the scenario script. + for step in self.steps { + let compiled = match step { + ScenarioScriptStep::Open(step) => { + let text = + marker_store.register_full_text(&step.file, step.text, "open.text")?; + vec![ScenarioStep::Open(OpenStep { + uri: file_uri(base_dir, &step.file), + text, + language_id: step.language_id, + version: step.version, + })] + } + ScenarioScriptStep::Create(step) => { + compile_create_step(step, base_dir, &mut marker_store)? 
+ } + ScenarioScriptStep::ChangeFull(step) => { + let text = marker_store.register_full_text( + &step.file, + step.text, + "changeFull.text", + )?; + vec![ScenarioStep::ChangeFull(ChangeFullStep { + uri: file_uri(base_dir, &step.file), + text, + version: step.version, + })] + } + ScenarioScriptStep::ChangeIncremental(step) => { + let range = + marker_store.resolve_range(&step.file, step.range, "changeIncremental")?; + let text = marker_store.register_incremental_text( + &step.file, + range, + step.text, + "changeIncremental.text", + )?; + vec![ScenarioStep::ChangeIncremental(ChangeIncrementalStep { + uri: file_uri(base_dir, &step.file), + range, + text, + version: step.version, + })] + } + ScenarioScriptStep::Save(step) => vec![ScenarioStep::Save(SaveStep { + uri: file_uri(base_dir, &step.file), + text: step + .text + .map(|text| marker_store.register_full_text(&step.file, text, "save.text")) + .transpose()?, + })], + ScenarioScriptStep::Close(step) => vec![ScenarioStep::Close(CloseStep { + uri: file_uri(base_dir, &step.file), + })], + ScenarioScriptStep::Config(step) => vec![ScenarioStep::Config(ConfigStep { + settings: step.settings, + })], + ScenarioScriptStep::WriteFile(step) => { + let text = + marker_store.register_full_text(&step.path, step.text, "writeFile.text")?; + vec![ScenarioStep::WriteFile(WriteFileStep { + path: file_path(base_dir, &step.path), + text, + })] + } + ScenarioScriptStep::DeleteFile(step) => { + marker_store.remove(&step.path); + vec![ScenarioStep::DeleteFile(DeleteFileStep { + path: file_path(base_dir, &step.path), + })] + } + ScenarioScriptStep::NotifyWatchedFiles(step) => { + for change in &step.changes { + if change.change_type == ScenarioFileChangeType::Deleted { + marker_store.remove(&change.path); + } + } + vec![ScenarioStep::NotifyWatchedFiles(NotifyWatchedFilesStep { + changes: step + .changes + .into_iter() + .map(|change| WatchedFileChangeStep { + uri: file_uri(base_dir, &change.path), + change_type: change.change_type, + }) + 
.collect(), + })] + } + ScenarioScriptStep::RequestCodeAction(step) => { + let diagnostics = step + .diagnostics + .into_iter() + .map(|diagnostic| { + diagnostic.resolve_with_file( + &marker_store, + &step.file, + "requestCodeAction.diagnostics", + ) + }) + .collect::, _>>()?; + let request_id = + registry.allocate(RequestKind::CodeAction, step.request_name)?; + code_action_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestCodeAction(RequestCodeActionStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + range: marker_store.resolve_range( + &step.file, + step.range, + "requestCodeAction", + )?, + diagnostics, + only: step.only, + })] + } + ScenarioScriptStep::ExpectCodeAction(step) => { + let request_id = + registry.claim(RequestKind::CodeAction, step.request.as_str())?; + let default_file = code_action_request_files + .get(&request_id) + .map(String::as_str); + let result = step + .result + .map(|actions| { + actions + .into_iter() + .map(|action| { + action.resolve( + base_dir, + &marker_store, + default_file, + "expectCodeAction.result", + ) + }) + .collect::, _>>() + }) + .transpose()?; + vec![ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { + id: request_id, + result, + })] + } + ScenarioScriptStep::RequestReferences(step) => { + vec![ScenarioStep::RequestReferences(RequestReferencesStep { + id: registry.allocate(RequestKind::References, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestReferences", + )?, + include_declaration: step.include_declaration, + })] + } + ScenarioScriptStep::ExpectReferences(step) => { + let result = step + .result + .map(|locations| { + locations + .into_iter() + .map(|location| { + location.resolve_location( + base_dir, + &marker_store, + "expectReferences.result", + ) + }) + .collect::, _>>() + }) + .transpose()?; + vec![ScenarioStep::ExpectReferences(ExpectReferencesStep { + id: 
registry.claim(RequestKind::References, step.request.as_str())?, + result, + })] + } + ScenarioScriptStep::RequestDefinition(step) => { + vec![ScenarioStep::RequestDefinition(RequestDefinitionStep { + id: registry.allocate(RequestKind::Definition, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestDefinition", + )?, + })] + } + ScenarioScriptStep::ExpectDefinition(step) => { + vec![ScenarioStep::ExpectDefinition(ExpectDefinitionStep { + id: registry.claim(RequestKind::Definition, step.request.as_str())?, + result: step + .result + .map(|result| { + result.resolve(base_dir, &marker_store, "expectDefinition.result") + }) + .transpose()?, + })] + } + ScenarioScriptStep::RequestDeclaration(step) => { + vec![ScenarioStep::RequestDeclaration(RequestDeclarationStep { + id: registry.allocate(RequestKind::Declaration, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestDeclaration", + )?, + })] + } + ScenarioScriptStep::ExpectDeclaration(step) => { + vec![ScenarioStep::ExpectDeclaration(ExpectDeclarationStep { + id: registry.claim(RequestKind::Declaration, step.request.as_str())?, + result: step + .result + .map(|result| { + result.resolve(base_dir, &marker_store, "expectDeclaration.result") + }) + .transpose()?, + })] + } + ScenarioScriptStep::RequestTypeDefinition(step) => { + vec![ScenarioStep::RequestTypeDefinition( + RequestTypeDefinitionStep { + id: registry + .allocate(RequestKind::TypeDefinition, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestTypeDefinition", + )?, + }, + )] + } + ScenarioScriptStep::ExpectTypeDefinition(step) => { + vec![ScenarioStep::ExpectTypeDefinition( + ExpectTypeDefinitionStep { + id: registry + .claim(RequestKind::TypeDefinition, step.request.as_str())?, + result: step + .result + 
.map(|result| { + result.resolve( + base_dir, + &marker_store, + "expectTypeDefinition.result", + ) + }) + .transpose()?, + }, + )] + } + ScenarioScriptStep::RequestPrepareRename(step) => { + let request_id = + registry.allocate(RequestKind::PrepareRename, step.request_name)?; + prepare_rename_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestPrepareRename( + RequestPrepareRenameStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestPrepareRename", + )?, + }, + )] + } + ScenarioScriptStep::ExpectPrepareRename(step) => { + let request_id = + registry.claim(RequestKind::PrepareRename, step.request.as_str())?; + let file = prepare_rename_request_files.get(&request_id).ok_or( + CompileScenarioError::MissingRequestFileContext { + step: "expectPrepareRename", + request_id, + }, + )?; + vec![ScenarioStep::ExpectPrepareRename(ExpectPrepareRenameStep { + id: request_id, + result: step.resolve_result( + &marker_store, + file, + "expectPrepareRename.result", + )?, + })] + } + ScenarioScriptStep::RequestRename(step) => { + vec![ScenarioStep::RequestRename(RequestRenameStep { + id: registry.allocate(RequestKind::Rename, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestRename", + )?, + new_name: step.new_name, + })] + } + ScenarioScriptStep::ExpectRename(step) => { + vec![ScenarioStep::ExpectRename(ExpectRenameStep { + id: registry.claim(RequestKind::Rename, step.request.as_str())?, + result: step + .result + .map(|result| { + result.resolve(base_dir, &marker_store, "expectRename.result") + }) + .transpose()?, + })] + } + ScenarioScriptStep::RequestHover(step) => { + vec![ScenarioStep::RequestHover(RequestHoverStep { + id: registry.allocate(RequestKind::Hover, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, 
+ step.at, + "requestHover", + )?, + })] + } + ScenarioScriptStep::ExpectHover(step) => { + vec![ScenarioStep::ExpectHover(ExpectHoverStep { + id: registry.claim(RequestKind::Hover, step.request.as_str())?, + result: step.result, + })] + } + ScenarioScriptStep::RequestSignatureHelp(step) => { + vec![ScenarioStep::RequestSignatureHelp( + RequestSignatureHelpStep { + id: registry.allocate(RequestKind::SignatureHelp, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestSignatureHelp", + )?, + }, + )] + } + ScenarioScriptStep::ExpectSignatureHelp(step) => { + vec![ScenarioStep::ExpectSignatureHelp(ExpectSignatureHelpStep { + id: registry.claim(RequestKind::SignatureHelp, step.request.as_str())?, + result: step.result, + })] + } + ScenarioScriptStep::ExpectTypes(step) => { + let file = step.file; + // Sugar step: each type check expands to an isolated hover + // request/expect pair so failures report per-check locations. + step.checks + .into_iter() + .try_fold(Vec::new(), |mut steps, check| { + let id = registry.allocate(RequestKind::Hover, None)?; + let request = ScenarioStep::RequestHover(RequestHoverStep { + id, + uri: file_uri(base_dir, &file), + position: marker_store.resolve_position( + &file, + check.at, + "expectTypes.checks", + )?, + }); + let expect = ScenarioStep::ExpectHover(ExpectHoverStep { + id, + result: Some(vec![HoverSectionExpectation::Type { + ty: check.expected_type, + }]), + }); + steps.push(request); + steps.push(expect); + Ok::, CompileScenarioError>(steps) + })? 
+ } + ScenarioScriptStep::RequestCompletion(step) => { + let request_id = + registry.allocate(RequestKind::Completion, step.request_name)?; + completion_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestCompletion(RequestCompletionStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestCompletion", + )?, + })] + } + ScenarioScriptStep::ExpectCompletion(step) => { + let request_id = + registry.claim(RequestKind::Completion, step.request.as_str())?; + let file = completion_request_files.get(&request_id).ok_or({ + CompileScenarioError::MissingRequestFileContext { + step: "expectCompletion", + request_id, + } + })?; + vec![ScenarioStep::ExpectCompletion(ExpectCompletionStep { + id: request_id, + result: step.resolve_result( + &marker_store, + file, + "expectCompletion.result", + )?, + })] + } + ScenarioScriptStep::RequestFormatting(step) => { + vec![ScenarioStep::RequestFormatting(RequestFormattingStep { + id: registry.allocate(RequestKind::Formatting, step.request_name)?, + uri: file_uri(base_dir, &step.file), + tab_size: step.tab_size, + insert_spaces: step.insert_spaces, + trim_trailing_whitespace: step.trim_trailing_whitespace, + insert_final_newline: step.insert_final_newline, + trim_final_newlines: step.trim_final_newlines, + })] + } + ScenarioScriptStep::ExpectFormatting(step) => { + vec![ScenarioStep::ExpectFormatting(ExpectFormattingStep { + id: registry.claim(RequestKind::Formatting, step.request.as_str())?, + result: step.result, + })] + } + ScenarioScriptStep::RequestRangeFormatting(step) => { + vec![ScenarioStep::RequestRangeFormatting( + RequestRangeFormattingStep { + id: registry + .allocate(RequestKind::RangeFormatting, step.request_name)?, + uri: file_uri(base_dir, &step.file), + range: marker_store.resolve_range( + &step.file, + step.range, + "requestRangeFormatting", + )?, + tab_size: step.tab_size, + insert_spaces: step.insert_spaces, 
+ trim_trailing_whitespace: step.trim_trailing_whitespace, + insert_final_newline: step.insert_final_newline, + trim_final_newlines: step.trim_final_newlines, + }, + )] + } + ScenarioScriptStep::ExpectRangeFormatting(step) => { + vec![ScenarioStep::ExpectRangeFormatting( + ExpectRangeFormattingStep { + id: registry + .claim(RequestKind::RangeFormatting, step.request.as_str())?, + result: step.result, + }, + )] + } + ScenarioScriptStep::RequestSemanticTokensFull(step) => { + let request_id = + registry.allocate(RequestKind::SemanticTokensFull, step.request_name)?; + semantic_tokens_full_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestSemanticTokensFull( + RequestSemanticTokensFullStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + }, + )] + } + ScenarioScriptStep::ExpectSemanticTokensFull(step) => { + let request_id = + registry.claim(RequestKind::SemanticTokensFull, step.request.as_str())?; + let file = semantic_tokens_full_request_files.get(&request_id).ok_or( + CompileScenarioError::MissingRequestFileContext { + step: "expectSemanticTokensFull", + request_id, + }, + )?; + vec![ScenarioStep::ExpectSemanticTokensFull( + ExpectSemanticTokensFullStep { + id: request_id, + result: step + .result + .map(|result| { + result.resolve_full( + &marker_store, + file, + "expectSemanticTokensFull.result", + ) + }) + .transpose()?, + }, + )] + } + ScenarioScriptStep::RequestSemanticTokensRange(step) => { + let request_id = + registry.allocate(RequestKind::SemanticTokensRange, step.request_name)?; + semantic_tokens_range_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestSemanticTokensRange( + RequestSemanticTokensRangeStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + range: marker_store.resolve_range( + &step.file, + step.range, + "requestSemanticTokensRange", + )?, + }, + )] + } + ScenarioScriptStep::ExpectSemanticTokensRange(step) => { + let request_id = + 
registry.claim(RequestKind::SemanticTokensRange, step.request.as_str())?; + let file = semantic_tokens_range_request_files.get(&request_id).ok_or( + CompileScenarioError::MissingRequestFileContext { + step: "expectSemanticTokensRange", + request_id, + }, + )?; + vec![ScenarioStep::ExpectSemanticTokensRange( + ExpectSemanticTokensRangeStep { + id: request_id, + result: step + .result + .map(|result| { + result.resolve_range( + &marker_store, + file, + "expectSemanticTokensRange.result", + ) + }) + .transpose()?, + }, + )] + } + ScenarioScriptStep::RequestInlayHints(step) => { + let request_id = + registry.allocate(RequestKind::InlayHints, step.request_name)?; + inlay_hint_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestInlayHints(RequestInlayHintsStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + range: marker_store.resolve_range( + &step.file, + step.range, + "requestInlayHints", + )?, + })] + } + ScenarioScriptStep::ExpectInlayHints(step) => { + let request_id = + registry.claim(RequestKind::InlayHints, step.request.as_str())?; + let file = inlay_hint_request_files.get(&request_id).ok_or({ + CompileScenarioError::MissingRequestFileContext { + step: "expectInlayHints", + request_id, + } + })?; + let result = step + .result + .map(|hints| { + hints + .into_iter() + .map(|hint| { + hint.resolve(&marker_store, file, "expectInlayHints.result") + }) + .collect::, _>>() + }) + .transpose()?; + vec![ScenarioStep::ExpectInlayHints(ExpectInlayHintsStep { + id: request_id, + result, + })] + } + ScenarioScriptStep::RequestDocumentSymbol(step) => { + let request_id = + registry.allocate(RequestKind::DocumentSymbol, step.request_name)?; + document_symbol_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestDocumentSymbol( + RequestDocumentSymbolStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + }, + )] + } + ScenarioScriptStep::ExpectDocumentSymbol(step) => { + let request_id = + 
registry.claim(RequestKind::DocumentSymbol, step.request.as_str())?; + let file = document_symbol_request_files.get(&request_id).ok_or( + CompileScenarioError::MissingRequestFileContext { + step: "expectDocumentSymbol", + request_id, + }, + )?; + vec![ScenarioStep::ExpectDocumentSymbol( + ExpectDocumentSymbolStep { + id: request_id, + result: step.resolve_result( + &marker_store, + file, + "expectDocumentSymbol.result", + )?, + }, + )] + } + ScenarioScriptStep::RequestWorkspaceSymbol(step) => { + vec![ScenarioStep::RequestWorkspaceSymbol( + RequestWorkspaceSymbolStep { + id: registry + .allocate(RequestKind::WorkspaceSymbol, step.request_name)?, + query: step.query, + }, + )] + } + ScenarioScriptStep::ExpectWorkspaceSymbol(step) => { + vec![ScenarioStep::ExpectWorkspaceSymbol( + ExpectWorkspaceSymbolStep { + id: registry + .claim(RequestKind::WorkspaceSymbol, step.request.as_str())?, + result: step + .result + .map(|result| { + result.resolve( + base_dir, + &marker_store, + "expectWorkspaceSymbol.result", + ) + }) + .transpose()?, + }, + )] + } + ScenarioScriptStep::RequestCodeLens(step) => { + let request_id = registry.allocate(RequestKind::CodeLens, step.request_name)?; + code_lens_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestCodeLens(RequestCodeLensStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + })] + } + ScenarioScriptStep::ExpectCodeLens(step) => { + let request_id = + registry.claim(RequestKind::CodeLens, step.request.as_str())?; + let default_file = code_lens_request_files.get(&request_id).map(String::as_str); + vec![ScenarioStep::ExpectCodeLens(ExpectCodeLensStep { + id: request_id, + result: step + .result + .map(|result| { + result + .into_iter() + .map(|lens| { + lens.resolve( + base_dir, + &marker_store, + default_file, + "expectCodeLens.result", + ) + }) + .collect::, _>>() + }) + .transpose()?, + })] + } + ScenarioScriptStep::RequestExecuteCommand(step) => { + let arguments = step + .arguments + 
.into_iter() + .map(|argument| { + resolve_file_uri_shorthand_json( + argument, + base_dir, + "requestExecuteCommand.arguments", + ) + }) + .collect::, _>>()?; + vec![ScenarioStep::RequestExecuteCommand( + RequestExecuteCommandStep { + id: registry + .allocate(RequestKind::ExecuteCommand, step.request_name)?, + command: step.command, + arguments, + }, + )] + } + ScenarioScriptStep::ExpectExecuteCommand(step) => { + vec![ScenarioStep::ExpectExecuteCommand( + ExpectExecuteCommandStep { + id: registry + .claim(RequestKind::ExecuteCommand, step.request.as_str())?, + result: step.result, + }, + )] + } + ScenarioScriptStep::RequestExecuteCodeLens(step) => { + let code_lens_request_id = + registry.resolve(RequestKind::CodeLens, step.request.as_str())?; + vec![ScenarioStep::RequestExecuteCodeLens( + RequestExecuteCodeLensStep { + id: registry + .allocate(RequestKind::ExecuteCodeLens, step.request_name)?, + code_lens_request_id, + index: step.index, + }, + )] + } + ScenarioScriptStep::ExpectExecuteCodeLens(step) => { + vec![ScenarioStep::ExpectExecuteCodeLens( + ExpectExecuteCodeLensStep { + id: registry + .claim(RequestKind::ExecuteCodeLens, step.request.as_str())?, + result: step.result, + }, + )] + } + ScenarioScriptStep::RequestCustom(step) => { + let params = resolve_file_uri_shorthand_json( + step.params, + base_dir, + "requestCustom.params", + )?; + vec![ScenarioStep::RequestCustom(RequestCustomStep { + id: registry.allocate(RequestKind::Custom, step.request_name)?, + method: step.method, + params, + })] + } + ScenarioScriptStep::ExpectCustom(step) => { + vec![ScenarioStep::ExpectCustom(ExpectCustomStep { + id: registry.claim(RequestKind::Custom, step.request.as_str())?, + result: step.result, + })] + } + ScenarioScriptStep::ExpectDiagnostics(step) => { + let diagnostics = step + .diagnostics + .into_iter() + .map(|diagnostic| { + diagnostic.resolve_with_file( + &marker_store, + &step.file, + "expectDiagnostics.diagnostics", + ) + }) + .collect::, _>>()?; + 
vec![ScenarioStep::ExpectDiagnostics(ExpectDiagnosticsStep { + uri: file_uri(base_dir, &step.file), + diagnostics, + })] + } + ScenarioScriptStep::DiagnosticsSettled(step) => { + vec![ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { + timeout_ms: step.timeout_ms, + idle_ms: step.idle_ms, + })] + } + }; + steps.extend(compiled); + } + + Ok(Scenario::new(steps)) + } +} + +/// Lower a `create` script step into `writeFile` + `open` scenario steps. +/// +/// This also seeds marker tracking for each created file, so all subsequent +/// marker-based references resolve against the cleaned text content. +fn compile_create_step( + step: CreateScriptStep, + base_dir: &Path, + marker_store: &mut MarkerStore, +) -> Result, CompileScenarioError> { + if step.files.is_empty() { + return Err(CompileScenarioError::CreateRequiresFiles); + } + + let mut steps = Vec::with_capacity(step.files.len() * 2); + for (relative_path, text) in &step.files { + let text = marker_store.register_full_text(relative_path, text.clone(), "create.files")?; + steps.push(ScenarioStep::WriteFile(WriteFileStep { + path: file_path(base_dir, relative_path), + text, + })); + } + + let files_to_open = match step.open { + Some(paths) => paths, + None => step.files.keys().cloned().collect(), + }; + for relative_path in files_to_open { + if !step.files.contains_key(&relative_path) { + return Err(CompileScenarioError::CreateOpenReferencesUnknownFile { + relative_path, + expected: step.files.keys().cloned().collect(), + }); + } + let text = marker_store + .full_text(&relative_path) + .ok_or_else(|| CompileScenarioError::CreateMissingParsedText { + relative_path: relative_path.clone(), + })? + .to_string(); + steps.push(ScenarioStep::Open(OpenStep { + uri: file_uri(base_dir, &relative_path), + text, + language_id: step.language_id.clone(), + version: step.version, + })); + } + + Ok(steps) +} + +/// Raw YAML `step` union, deserialized before semantic validation. 
+/// +/// These payload types intentionally mirror the DSL surface. Cross-step checks +/// (alias matching, marker existence, request ordering) happen in `compile`. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(tag = "step", rename_all = "camelCase")] +enum ScenarioScriptStep { + Open(OpenScriptStep), + Create(CreateScriptStep), + ChangeFull(ChangeFullScriptStep), + ChangeIncremental(ChangeIncrementalScriptStep), + Save(SaveScriptStep), + Close(CloseScriptStep), + Config(ConfigScriptStep), + WriteFile(WriteFileScriptStep), + DeleteFile(DeleteFileScriptStep), + NotifyWatchedFiles(NotifyWatchedFilesScriptStep), + RequestCodeAction(RequestCodeActionScriptStep), + ExpectCodeAction(ExpectCodeActionScriptStep), + RequestReferences(RequestReferencesScriptStep), + ExpectReferences(ExpectReferencesScriptStep), + RequestDefinition(RequestDefinitionScriptStep), + ExpectDefinition(ExpectDefinitionScriptStep), + RequestDeclaration(RequestDeclarationScriptStep), + ExpectDeclaration(ExpectDeclarationScriptStep), + RequestTypeDefinition(RequestTypeDefinitionScriptStep), + ExpectTypeDefinition(ExpectTypeDefinitionScriptStep), + RequestPrepareRename(RequestPrepareRenameScriptStep), + ExpectPrepareRename(ExpectPrepareRenameScriptStep), + RequestRename(RequestRenameScriptStep), + ExpectRename(ExpectRenameScriptStep), + RequestHover(RequestHoverScriptStep), + ExpectHover(ExpectHoverScriptStep), + RequestSignatureHelp(RequestSignatureHelpScriptStep), + ExpectSignatureHelp(ExpectSignatureHelpScriptStep), + ExpectTypes(ExpectTypesScriptStep), + RequestCompletion(RequestCompletionScriptStep), + ExpectCompletion(ExpectCompletionScriptStep), + RequestFormatting(RequestFormattingScriptStep), + ExpectFormatting(ExpectFormattingScriptStep), + RequestRangeFormatting(RequestRangeFormattingScriptStep), + ExpectRangeFormatting(ExpectFormattingScriptStep), + RequestSemanticTokensFull(RequestSemanticTokensFullScriptStep), + ExpectSemanticTokensFull(ExpectSemanticTokensFullScriptStep), + 
RequestSemanticTokensRange(RequestSemanticTokensRangeScriptStep), + ExpectSemanticTokensRange(ExpectSemanticTokensRangeScriptStep), + RequestInlayHints(RequestInlayHintsScriptStep), + ExpectInlayHints(ExpectInlayHintsScriptStep), + RequestDocumentSymbol(RequestDocumentSymbolScriptStep), + ExpectDocumentSymbol(ExpectDocumentSymbolScriptStep), + RequestWorkspaceSymbol(RequestWorkspaceSymbolScriptStep), + ExpectWorkspaceSymbol(ExpectWorkspaceSymbolScriptStep), + RequestCodeLens(RequestCodeLensScriptStep), + ExpectCodeLens(ExpectCodeLensScriptStep), + RequestExecuteCommand(RequestExecuteCommandScriptStep), + ExpectExecuteCommand(ExpectExecuteCommandScriptStep), + RequestExecuteCodeLens(RequestExecuteCodeLensScriptStep), + ExpectExecuteCodeLens(ExpectExecuteCodeLensScriptStep), + RequestCustom(RequestCustomScriptStep), + ExpectCustom(ExpectCustomScriptStep), + ExpectDiagnostics(ExpectDiagnosticsScriptStep), + DiagnosticsSettled(DiagnosticsSettledScriptStep), +} + +/// `create` creates one or more files and optionally opens a subset of them. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct CreateScriptStep { + files: BTreeMap, + #[serde(default)] + open: Option>, + #[serde(default = "default_language_id")] + language_id: String, + #[serde(default = "default_open_version")] + version: i32, +} + +/// `open` seeds marker-aware text for one file and opens it in the runner. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct OpenScriptStep { + file: String, + text: String, + #[serde(default = "default_language_id")] + language_id: String, + #[serde(default = "default_open_version")] + version: i32, +} + +/// `changeFull` replaces full document text and updates marker tracking. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ChangeFullScriptStep { + file: String, + text: String, + version: i32, +} + +/// `changeIncremental` applies a ranged edit with marker-aware range parsing. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ChangeIncrementalScriptStep { + file: String, + #[serde(flatten)] + range: RangeInput, + text: String, + version: i32, +} + +/// `save` can optionally send explicit text (for save-with-content scenarios). +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct SaveScriptStep { + file: String, + text: Option, +} + +/// `close` closes one open document URI. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct CloseScriptStep { + file: String, +} + +/// `config` mutates runtime server settings. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ConfigScriptStep { + settings: serde_json::Value, +} + +/// `writeFile` mutates workspace files without opening a document. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct WriteFileScriptStep { + path: String, + text: String, +} + +/// `deleteFile` removes a workspace file. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct DeleteFileScriptStep { + path: String, +} + +/// `notifyWatchedFiles` sends synthetic file-watch notifications. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct NotifyWatchedFilesScriptStep { + changes: Vec, +} + +/// One watched-file change entry. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct WatchedFileChangeScriptStep { + path: String, + #[serde(rename = "type")] + change_type: ScenarioFileChangeType, +} + +// Request/expect payloads generally follow the same aliasing contract: +// `request*` steps may define `as`, and matching `expect*` steps must reference +// that alias via `request`. +/// `requestCodeAction` optionally names the request and captures context. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestCodeActionScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(flatten)] + range: RangeInput, + #[serde(default)] + diagnostics: Vec, + #[serde(default)] + only: Option>, +} + +/// `expectCodeAction` matches by explicit request alias. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectCodeActionScriptStep { + request: String, + result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestReferencesScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, + #[serde(default)] + include_declaration: bool, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectReferencesScriptStep { + request: String, + result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestDefinitionScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectDefinitionScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestDeclarationScriptStep { + #[serde(default, rename = 
"as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectDeclarationScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestTypeDefinitionScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectTypeDefinitionScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestPrepareRenameScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestRenameScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, + new_name: String, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectRenameScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestHoverScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestSignatureHelpScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +/// `expectTypes` shorthand for a list of marker-position type assertions. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectTypesScriptStep { + file: String, + checks: Vec, +} + +/// One `expectTypes.checks` entry, expanded to requestHover + expectHover. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct TypeExpectationScriptStep { + #[serde(default)] + at: Option, + #[serde(rename = "type")] + expected_type: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestCompletionScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestFormattingScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default = "default_formatting_tab_size")] + tab_size: u32, + #[serde(default = "default_formatting_insert_spaces")] + insert_spaces: bool, + #[serde(default)] + trim_trailing_whitespace: Option, + #[serde(default)] + insert_final_newline: Option, + #[serde(default)] + trim_final_newlines: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestRangeFormattingScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(flatten)] + range: RangeInput, + #[serde(default = "default_formatting_tab_size")] + tab_size: u32, + #[serde(default = "default_formatting_insert_spaces")] + insert_spaces: bool, + #[serde(default)] + trim_trailing_whitespace: Option, + #[serde(default)] + insert_final_newline: Option, + #[serde(default)] + trim_final_newlines: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestSemanticTokensFullScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, +} + +#[derive(Debug, Clone, PartialEq, 
Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectSemanticTokensFullScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestSemanticTokensRangeScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(flatten)] + range: RangeInput, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectSemanticTokensRangeScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestInlayHintsScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(flatten)] + range: RangeInput, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectInlayHintsScriptStep { + request: String, + result: Option>, +} + +// `InlayHintInput` defines custom equality semantics (JSON-form comparison for +// selected fields), so this step keeps equality aligned with that behavior. 
+impl PartialEq for ExpectInlayHintsScriptStep { + fn eq(&self, other: &Self) -> bool { + self.request == other.request && self.result == other.result + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestDocumentSymbolScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestWorkspaceSymbolScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + query: String, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectWorkspaceSymbolScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestCodeLensScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectCodeLensScriptStep { + request: String, + result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestExecuteCommandScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + command: String, + #[serde(default)] + arguments: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestExecuteCodeLensScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + request: String, + index: usize, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectExecuteCodeLensScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestCustomScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + method: String, + #[serde(default)] + params: serde_json::Value, +} + +#[derive(Debug, 
Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectCustomScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectDiagnosticsScriptStep { + file: String, + #[serde(default)] + diagnostics: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct DiagnosticsSettledScriptStep { + /// Total time budget waiting for diagnostics to settle. + #[serde(default = "default_timeout_ms")] + timeout_ms: u64, + /// Required quiet window with no diagnostics updates before success. + #[serde(default = "default_idle_ms")] + idle_ms: u64, +} + +// DSL defaults chosen to mirror common client behavior in tests. +const fn default_open_version() -> i32 { + 1 +} + +fn default_language_id() -> String { + "jsonnet".to_string() +} + +const fn default_formatting_tab_size() -> u32 { + 2 +} + +const fn default_formatting_insert_spaces() -> bool { + true +} + +const fn default_timeout_ms() -> u64 { + 1_000 +} + +const fn default_idle_ms() -> u64 { + 50 +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs new file mode 100644 index 00000000..75c2bd6e --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -0,0 +1,1318 @@ +//! Shorthand input decoders used by scenario compilation. +//! +//! The scenario YAML supports both full LSP JSON objects and concise shorthand +//! forms. This module resolves shorthand into concrete `lsp_types` structures, +//! including marker-aware expansions such as `{ positionOf: "name" }` and +//! `{ rangeOf: "name" }` inside expectation payloads. 
+ +use std::{ + collections::{BTreeMap, HashMap}, + path::Path, + str::FromStr, +}; + +use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; +use lsp_types::{ + CodeAction, CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, + DiagnosticSeverity, DocumentSymbolResponse, GotoDefinitionResponse, InlayHint, Location, + NumberOrString, PrepareRenameResponse, SemanticTokens, SemanticTokensRangeResult, + SemanticTokensResult, SignatureHelp, SymbolInformation, SymbolKind, TextEdit, WorkspaceEdit, + WorkspaceSymbolResponse, +}; +use serde::{de::DeserializeOwned, Deserialize}; +use thiserror::Error; + +use super::{ + markers::{ + MarkerError, MarkerStore, PositionFieldInput, PositionSpec, RangeFieldInput, RangeInput, + }, + paths::file_uri, +}; +use crate::{ + scenario::HoverSectionExpectation, + semantic_tokens::{encode_semantic_tokens, semantic_modifiers, ExpectedSemanticToken}, +}; + +type UriParseError = ::Err; + +#[derive(Debug, Error)] +pub enum InputError { + #[error(transparent)] + Markers(#[from] MarkerError), + #[error("{context}: parse URI for '{path}': {source}")] + UriParse { + context: String, + path: String, + source: UriParseError, + }, + #[error("{context}: resolve edit for '{path}': {source}")] + ResolveEditForPath { + context: String, + path: String, + source: Box, + }, + #[error("{context}: decode {target}: {source}")] + JsonDecode { + context: String, + target: &'static str, + source: serde_json::Error, + }, + #[error("{context}: decode workspace symbol shorthand into SymbolInformation: {source}")] + DecodeWorkspaceSymbolShorthand { + context: String, + source: serde_json::Error, + }, + #[error("{context}: serialize {target} at {path}: {source}")] + JsonSerialize { + context: String, + target: &'static str, + path: String, + source: serde_json::Error, + }, + #[error("{context}: command argument `file` must be a string")] + CommandArgumentFileMustBeString { context: String }, + #[error("{context}: 
`{path}.file` must be a string")] + FileUriShorthandFieldMustBeString { context: String, path: String }, + #[error("{context}: unknown semantic token type '{token_type}' at {location}")] + UnknownSemanticTokenType { + context: String, + token_type: String, + location: String, + }, + #[error("{context}: unknown semantic token modifier '{modifier}' at {location}")] + UnknownSemanticTokenModifier { + context: String, + modifier: String, + location: String, + }, + #[error("{context}: tokensByMarker[{index}] marker '{marker}' spans multiple lines")] + SemanticTokenMarkerSpansMultipleLines { + context: String, + index: usize, + marker: String, + }, + #[error("{context}: tokensByMarker[{index}] marker '{marker}' has invalid range")] + SemanticTokenMarkerInvalidRange { + context: String, + index: usize, + marker: String, + }, + #[error("{context}: tokensByMarker[{index}] marker '{marker}' resolves to an empty range")] + SemanticTokenMarkerEmptyRange { + context: String, + index: usize, + marker: String, + }, + #[error("{context}: {path} cannot be empty")] + MarkerNameCannotBeEmpty { context: String, path: String }, + #[error("{context}: {path} must be a string marker name, got {actual}")] + MarkerNameMustBeString { + context: String, + path: String, + actual: serde_json::Value, + }, +} + +/// Accept either full `Diagnostic` JSON or concise shorthand fields. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum DiagnosticInput { + Shorthand(DiagnosticShorthandInput), + Full(Diagnostic), +} + +impl DiagnosticInput { + /// Resolve a diagnostic input using marker context from a specific file. + pub(super) fn resolve_with_file( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(diagnostic) => Ok(diagnostic), + Self::Shorthand(shorthand) => shorthand.resolve(marker_store, file, context), + } + } + + /// Resolve a diagnostic input without marker context. 
+ pub(super) fn resolve_without_markers(self, context: &str) -> Result { + match self { + Self::Full(diagnostic) => Ok(diagnostic), + Self::Shorthand(shorthand) => shorthand.resolve_without_markers(context), + } + } + + fn resolve_with_optional_file( + self, + marker_store: &MarkerStore, + file: Option<&str>, + context: &str, + ) -> Result { + match file { + Some(file) => self.resolve_with_file(marker_store, file, context), + None => self.resolve_without_markers(context), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct DiagnosticShorthandInput { + #[serde(flatten)] + range: RangeInput, + #[serde(default)] + severity: Option, + #[serde(default)] + code: Option, + #[serde(default)] + source: Option, + message: String, +} + +impl DiagnosticShorthandInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + let range = marker_store.resolve_range(file, self.range, context)?; + Ok(Diagnostic { + range, + severity: self.severity.map(DiagnosticSeverityInput::resolve), + code: self.code.map(DiagnosticCodeInput::resolve), + code_description: None, + source: self.source, + message: self.message, + related_information: None, + tags: None, + data: None, + }) + } + + fn resolve_without_markers(self, context: &str) -> Result { + let range = self.range.resolve_range(context)?; + Ok(Diagnostic { + range, + severity: self.severity.map(DiagnosticSeverityInput::resolve), + code: self.code.map(DiagnosticCodeInput::resolve), + code_description: None, + source: self.source, + message: self.message, + related_information: None, + tags: None, + data: None, + }) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "lowercase")] +enum DiagnosticSeverityInput { + Error, + Warning, + Information, + Hint, +} + +impl DiagnosticSeverityInput { + const fn resolve(self) -> DiagnosticSeverity { + match self { + Self::Error => 
DiagnosticSeverity::ERROR, + Self::Warning => DiagnosticSeverity::WARNING, + Self::Information => DiagnosticSeverity::INFORMATION, + Self::Hint => DiagnosticSeverity::HINT, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +enum DiagnosticCodeInput { + Number(i32), + String(String), +} + +impl DiagnosticCodeInput { + fn resolve(self) -> NumberOrString { + match self { + Self::Number(value) => NumberOrString::Number(value), + Self::String(value) => NumberOrString::String(value), + } + } +} + +/// Accept either full `CodeActionOrCommand` payloads or shorthand code actions. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum CodeActionOrCommandInput { + Shorthand(CodeActionShorthandInput), + Full(Box), +} + +impl CodeActionOrCommandInput { + /// Resolve shorthand/full code action representation into an LSP response item. + pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + default_file: Option<&str>, + context: &str, + ) -> Result { + match self { + Self::Full(action) => Ok(*action), + Self::Shorthand(action) => { + action.resolve(base_dir, marker_store, default_file, context) + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct CodeActionShorthandInput { + title: String, + #[serde(default)] + kind: Option, + #[serde(default, rename = "isPreferred")] + is_preferred: Option, + #[serde(default)] + diagnostics: Vec, + #[serde(default)] + edits: BTreeMap>, +} + +impl CodeActionShorthandInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + default_file: Option<&str>, + context: &str, + ) -> Result { + let diagnostics = self + .diagnostics + .into_iter() + .map(|diagnostic| { + diagnostic.resolve_with_optional_file( + marker_store, + default_file, + "expectCodeAction.result.diagnostics", + ) + }) + .collect::, _>>()?; + let diagnostics = (!diagnostics.is_empty()).then_some(diagnostics); 
+ + let edit = if self.edits.is_empty() { + None + } else { + let changes = + self.edits + .into_iter() + .map(|(relative_path, edits)| { + let uri: lsp_types::Uri = file_uri(base_dir, &relative_path) + .parse() + .map_err(|source| InputError::UriParse { + context: context.to_string(), + path: relative_path.clone(), + source, + })?; + let edits = edits + .into_iter() + .map(|edit| { + edit.resolve_with_markers( + marker_store, + &relative_path, + "expectCodeAction.result.edits", + ) + .map_err(|error| match error { + InputError::Markers(source) => InputError::ResolveEditForPath { + context: context.to_string(), + path: relative_path.clone(), + source: Box::new(source), + }, + other => other, + }) + }) + .collect::, _>>()?; + Ok::<_, InputError>((uri, edits)) + }) + .collect::, _>>()?; + + Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }) + }; + + Ok(CodeActionOrCommand::CodeAction(CodeAction { + title: self.title, + kind: self.kind, + diagnostics, + edit, + command: None, + is_preferred: self.is_preferred, + disabled: None, + data: None, + })) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct TextEditInput { + #[serde(flatten)] + range: RangeInput, + replace: String, +} + +impl TextEditInput { + /// Resolve text edit range using marker references from one file. + pub(super) fn resolve_with_markers( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + Ok(TextEdit { + range: marker_store.resolve_range(file, self.range, context)?, + new_text: self.replace, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct LocationInput { + file: String, + #[serde(flatten)] + range: RangeInput, +} + +impl LocationInput { + /// Resolve shorthand location into an absolute file URI and concrete range. 
+ pub(super) fn resolve_location( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + let uri: lsp_types::Uri = + file_uri(base_dir, &self.file) + .parse() + .map_err(|error| InputError::UriParse { + context: context.to_string(), + path: self.file.clone(), + source: error, + })?; + let range = marker_store.resolve_range(&self.file, self.range, context)?; + Ok(Location { uri, range }) + } +} + +/// Accept either scalar/array shorthand locations or full LSP response payload. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum GotoDefinitionResponseInput { + Single(LocationInput), + Many(Vec), + Full(GotoDefinitionResponse), +} + +impl GotoDefinitionResponseInput { + /// Resolve shorthand definition/declaration/typeDefinition response payloads. + pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + match self { + Self::Full(result) => Ok(result), + Self::Single(location) => location + .resolve_location(base_dir, marker_store, context) + .map(GotoDefinitionResponse::Scalar), + Self::Many(locations) => locations + .into_iter() + .map(|location| location.resolve_location(base_dir, marker_store, context)) + .collect::, _>>() + .map(GotoDefinitionResponse::Array), + } + } +} + +/// Accept either shorthand `edits` mapping or full `WorkspaceEdit`. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum WorkspaceEditInput { + Shorthand(WorkspaceEditShorthandInput), + Full(WorkspaceEdit), +} + +impl WorkspaceEditInput { + /// Resolve shorthand workspace edits into concrete `WorkspaceEdit`. 
+ pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + match self { + Self::Full(edit) => Ok(edit), + Self::Shorthand(shorthand) => shorthand.resolve(base_dir, marker_store, context), + } + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct WorkspaceEditShorthandInput { + #[serde(default)] + edits: BTreeMap>, +} + +impl WorkspaceEditShorthandInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + let changes = self + .edits + .into_iter() + .map(|(relative_path, edits)| { + let uri: lsp_types::Uri = + file_uri(base_dir, &relative_path) + .parse() + .map_err(|source| InputError::UriParse { + context: context.to_string(), + path: relative_path.clone(), + source, + })?; + let edits = edits + .into_iter() + .map(|edit| edit.resolve_with_markers(marker_store, &relative_path, context)) + .collect::, _>>()?; + Ok::<_, InputError>((uri, edits)) + }) + .collect::, _>>()?; + + Ok(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }) + } +} + +/// Accept either shorthand symbol entries or a full LSP workspace symbol response. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum WorkspaceSymbolResponseInput { + Shorthand(WorkspaceSymbolResponseShorthandInput), + Full(WorkspaceSymbolResponse), +} + +impl WorkspaceSymbolResponseInput { + /// Resolve shorthand workspace symbol responses into concrete LSP payloads. 
+ pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + match self { + Self::Full(response) => Ok(response), + Self::Shorthand(shorthand) => shorthand.resolve(base_dir, marker_store, context), + } + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct WorkspaceSymbolResponseShorthandInput { + symbols: Vec, +} + +impl WorkspaceSymbolResponseShorthandInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + let symbols = self + .symbols + .into_iter() + .map(|symbol| symbol.resolve(base_dir, marker_store, context)) + .collect::, _>>()?; + Ok(WorkspaceSymbolResponse::Flat(symbols)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct WorkspaceSymbolInput { + name: String, + kind: SymbolKind, + file: String, + #[serde(flatten)] + range: RangeInput, + #[serde(default)] + container_name: Option, +} + +impl WorkspaceSymbolInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + let uri: lsp_types::Uri = + file_uri(base_dir, &self.file) + .parse() + .map_err(|error| InputError::UriParse { + context: context.to_string(), + path: self.file.clone(), + source: error, + })?; + // Build via JSON so field names/types track the wire representation directly, + // including deprecated-but-still-used `containerName`. + let value = serde_json::json!({ + "name": self.name, + "kind": self.kind, + "tags": null, + "location": { + "uri": uri, + "range": marker_store.resolve_range(&self.file, self.range, context)?, + }, + "containerName": self.container_name, + }); + serde_json::from_value(value).map_err(|source| InputError::DecodeWorkspaceSymbolShorthand { + context: context.to_string(), + source, + }) + } +} + +/// Accept either shorthand code-lens rows or full `CodeLens` values. 
+#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum CodeLensInput { + Shorthand(CodeLensShorthandInput), + Full(CodeLens), +} + +impl CodeLensInput { + /// Resolve shorthand code lens values into concrete `CodeLens`. + pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + default_file: Option<&str>, + context: &str, + ) -> Result { + match self { + Self::Full(lens) => Ok(lens), + Self::Shorthand(shorthand) => { + shorthand.resolve(base_dir, marker_store, default_file, context) + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct CodeLensShorthandInput { + #[serde(flatten)] + range: RangeInput, + #[serde(default)] + command: Option, +} + +impl CodeLensShorthandInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + default_file: Option<&str>, + context: &str, + ) -> Result { + let range = match default_file { + Some(file) => marker_store.resolve_range(file, self.range, context)?, + None => self.range.resolve_range(context)?, + }; + let command = self + .command + .map(|command| command.resolve(base_dir, context)) + .transpose()?; + Ok(CodeLens { + range, + command, + data: None, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct CodeLensCommandInput { + title: String, + command: String, + #[serde(default)] + arguments: Vec, +} + +impl CodeLensCommandInput { + fn resolve(self, base_dir: &Path, context: &str) -> Result { + let arguments = self + .arguments + .into_iter() + .map(|argument| argument.resolve(base_dir, context)) + .collect::, _>>()?; + Ok(lsp_types::Command { + title: self.title, + command: self.command, + arguments: (!arguments.is_empty()).then_some(arguments), + }) + } +} + +/// YAML command-argument convenience: +/// - `{ file: "" }` becomes a file URI string. +/// - any other JSON value is passed through unchanged. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +enum CommandArgumentInput { + File { file: String }, + Value(serde_json::Value), +} + +impl CommandArgumentInput { + fn resolve(self, base_dir: &Path, context: &str) -> Result { + match self { + Self::File { file } => Ok(serde_json::Value::String(file_uri(base_dir, &file))), + Self::Value(value) => { + if let Some(file) = value.as_object().and_then(|object| object.get("file")) { + if let Some(file) = file.as_str() { + return Ok(serde_json::Value::String(file_uri(base_dir, file))); + } + return Err(InputError::CommandArgumentFileMustBeString { + context: context.to_string(), + }); + } + Ok(value) + } + } + } +} + +/// Resolve recursive `{ file: "" }` URI shorthands in JSON. +/// +/// Only object values with exactly one `file` key are rewritten to URI strings. +pub(super) fn resolve_file_uri_shorthand_json( + value: serde_json::Value, + base_dir: &Path, + context: &str, +) -> Result { + resolve_file_uri_shorthand_json_at(value, base_dir, context, "$") +} + +fn resolve_file_uri_shorthand_json_at( + value: serde_json::Value, + base_dir: &Path, + context: &str, + path: &str, +) -> Result { + match value { + serde_json::Value::Object(object) => { + if object.len() == 1 && object.contains_key("file") { + let Some(file) = object.get("file") else { + return Ok(serde_json::Value::Object(object)); + }; + let Some(file) = file.as_str() else { + return Err(InputError::FileUriShorthandFieldMustBeString { + context: context.to_string(), + path: path.to_string(), + }); + }; + return Ok(serde_json::Value::String(file_uri(base_dir, file))); + } + + let mut resolved = serde_json::Map::with_capacity(object.len()); + for (key, child) in object { + let child_path = format!("{path}.{key}"); + resolved.insert( + key, + resolve_file_uri_shorthand_json_at(child, base_dir, context, &child_path)?, + ); + } + Ok(serde_json::Value::Object(resolved)) + } + serde_json::Value::Array(values) => { + let mut resolved = 
Vec::with_capacity(values.len()); + for (index, child) in values.into_iter().enumerate() { + let child_path = format!("{path}[{index}]"); + resolved.push(resolve_file_uri_shorthand_json_at( + child, + base_dir, + context, + &child_path, + )?); + } + Ok(serde_json::Value::Array(resolved)) + } + _ => Ok(value), + } +} + +/// Accept either a full semantic-token result or marker-driven shorthand. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum SemanticTokensResultInput { + Full(SemanticTokensResult), + Shorthand(SemanticTokensShorthandInput), +} + +impl SemanticTokensResultInput { + /// Resolve expected semantic-tokens payload for `semanticTokens/full`. + pub(super) fn resolve_full( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(result) => Ok(result), + Self::Shorthand(shorthand) => Ok(SemanticTokensResult::Tokens(shorthand.resolve( + marker_store, + file, + context, + )?)), + } + } + + /// Resolve expected semantic-tokens payload for `semanticTokens/range`. + pub(super) fn resolve_range( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(SemanticTokensResult::Tokens(tokens)) => { + Ok(SemanticTokensRangeResult::Tokens(tokens)) + } + Self::Full(SemanticTokensResult::Partial(partial)) => { + Ok(SemanticTokensRangeResult::Partial(partial)) + } + Self::Shorthand(shorthand) => Ok(SemanticTokensRangeResult::Tokens( + shorthand.resolve(marker_store, file, context)?, + )), + } + } +} + +/// Marker-only semantic token shorthand used in YAML expectations. +/// +/// Absolute token tuples are intentionally not supported here to keep tests +/// resilient to unrelated token-position churn. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct SemanticTokensShorthandInput { + #[serde(default, rename = "tokensByMarker")] + tokens_by_marker: Vec, +} + +impl SemanticTokensShorthandInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + let absolute_tokens = self + .tokens_by_marker + .into_iter() + .enumerate() + .map(|(index, token)| token.resolve(context, marker_store, file, index)) + .collect::, _>>()?; + Ok(encode_semantic_tokens(absolute_tokens)) + } +} + +/// One expected semantic token anchored to a named marker range. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct SemanticTokenByMarkerInput { + marker: String, + #[serde(rename = "type")] + token_type: SemanticTokenTypeInput, + #[serde(default)] + modifiers: Vec, +} + +impl SemanticTokenByMarkerInput { + fn resolve( + self, + context: &str, + marker_store: &MarkerStore, + file: &str, + index: usize, + ) -> Result { + let marker = self.marker; + let token_type = self + .token_type + .resolve_at(context, &format!("tokensByMarker[{index}].type"))?; + let modifiers = self + .modifiers + .into_iter() + .enumerate() + .map(|(modifier_index, modifier)| { + modifier.resolve_at( + context, + &format!("tokensByMarker[{index}].modifiers[{modifier_index}]"), + ) + }) + .collect::, _>>()?; + let range = marker_store.resolve_named_range(file, &marker, context)?; + if range.start.line != range.end.line { + return Err(InputError::SemanticTokenMarkerSpansMultipleLines { + context: context.to_string(), + index, + marker, + }); + } + if range.end.character < range.start.character { + return Err(InputError::SemanticTokenMarkerInvalidRange { + context: context.to_string(), + index, + marker, + }); + } + let len = range.end.character - range.start.character; + if len == 0 { + return Err(InputError::SemanticTokenMarkerEmptyRange { + context: context.to_string(), + index, + 
marker, + }); + } + + Ok(ExpectedSemanticToken::new( + range.start.line, + range.start.character, + len, + token_type, + semantic_modifiers(&modifiers), + )) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(transparent)] +struct SemanticTokenTypeInput(String); + +impl SemanticTokenTypeInput { + fn resolve_at( + self, + context: &str, + location: &str, + ) -> Result { + SemanticTokenTypeName::from_str(&self.0).map_err(|()| { + InputError::UnknownSemanticTokenType { + context: context.to_string(), + token_type: self.0, + location: location.to_string(), + } + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(transparent)] +struct SemanticTokenModifierInput(String); + +impl SemanticTokenModifierInput { + fn resolve_at( + self, + context: &str, + location: &str, + ) -> Result { + SemanticTokenModifierName::from_str(&self.0).map_err(|()| { + InputError::UnknownSemanticTokenModifier { + context: context.to_string(), + modifier: self.0, + location: location.to_string(), + } + }) + } +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub(super) struct InlayHintInput { + #[serde(flatten)] + position: PositionFieldInput, + label: lsp_types::InlayHintLabel, + #[serde(default)] + kind: Option, + #[serde(default)] + text_edits: Option>, + #[serde(default)] + tooltip: Option, + #[serde(default)] + padding_left: Option, + #[serde(default)] + padding_right: Option, + #[serde(default)] + data: Option, +} + +impl InlayHintInput { + /// Resolve shorthand inlay hint fields, including marker-based positions/ranges. 
+ pub(super) fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + let text_edits = self + .text_edits + .map(|edits| { + edits + .into_iter() + .map(|edit| { + edit.resolve(marker_store, file, "expectInlayHints.result.textEdits") + }) + .collect::, _>>() + }) + .transpose()?; + Ok(InlayHint { + position: self.position.resolve(marker_store, file, context)?, + label: self.label, + kind: self.kind, + text_edits, + tooltip: self.tooltip, + padding_left: self.padding_left, + padding_right: self.padding_right, + data: self.data, + }) + } +} + +impl PartialEq for InlayHintInput { + // `InlayHintLabel` and `InlayHintTooltip` enums do not expose stable structural + // comparison helpers for all variants, so compare their JSON forms instead. + fn eq(&self, other: &Self) -> bool { + self.position == other.position + && self.kind == other.kind + && self.text_edits == other.text_edits + && self.padding_left == other.padding_left + && self.padding_right == other.padding_right + && self.data == other.data + && serde_json::to_value(&self.label).ok() == serde_json::to_value(&other.label).ok() + && serde_json::to_value(&self.tooltip).ok() == serde_json::to_value(&other.tooltip).ok() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +struct InlayHintTextEditInput { + #[serde(flatten)] + range: RangeFieldInput, + new_text: String, +} + +impl InlayHintTextEditInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + Ok(TextEdit { + range: self.range.resolve(marker_store, file, context)?, + new_text: self.new_text, + }) + } +} + +/// Resolve marker references in arbitrary JSON, then deserialize into `T`. 
+fn resolve_marker_json_input( + value: serde_json::Value, + marker_store: &MarkerStore, + file: &str, + context: &str, +) -> Result +where + T: DeserializeOwned, +{ + let resolved = resolve_marker_references_json(value, marker_store, file, context, "$")?; + serde_json::from_value(resolved).map_err(|source| InputError::JsonDecode { + context: context.to_string(), + target: "marker-expanded value", + source, + }) +} + +/// Recursively rewrite `{ positionOf: ... }` and `{ rangeOf: ... }` objects. +/// +/// Only single-key objects with these keys are treated as marker directives. +/// Objects with additional keys are treated as normal JSON objects and their +/// children are traversed recursively. +fn resolve_marker_references_json( + value: serde_json::Value, + marker_store: &MarkerStore, + file: &str, + context: &str, + path: &str, +) -> Result { + match value { + serde_json::Value::Object(mut object) => { + if object.len() == 1 { + if let Some(marker) = object.remove("positionOf") { + let marker_name = + marker_name_from_value(marker, context, &format!("{path}.positionOf"))?; + let position = marker_store.resolve_position_spec( + file, + PositionSpec::Marker(marker_name), + context, + )?; + return serde_json::to_value(position).map_err(|source| { + InputError::JsonSerialize { + context: context.to_string(), + target: "position", + path: path.to_string(), + source, + } + }); + } + if let Some(marker) = object.remove("rangeOf") { + let marker_name = + marker_name_from_value(marker, context, &format!("{path}.rangeOf"))?; + let range = marker_store.resolve_named_range(file, &marker_name, context)?; + return serde_json::to_value(range).map_err(|source| { + InputError::JsonSerialize { + context: context.to_string(), + target: "range", + path: path.to_string(), + source, + } + }); + } + } + + let mut resolved = serde_json::Map::with_capacity(object.len()); + for (key, child) in object { + let child_path = format!("{path}.{key}"); + let resolved_child = 
resolve_marker_references_json( + child, + marker_store, + file, + context, + &child_path, + )?; + resolved.insert(key, resolved_child); + } + Ok(serde_json::Value::Object(resolved)) + } + serde_json::Value::Array(items) => items + .into_iter() + .enumerate() + .map(|(index, item)| { + let child_path = format!("{path}[{index}]"); + resolve_marker_references_json(item, marker_store, file, context, &child_path) + }) + .collect::, _>>() + .map(serde_json::Value::Array), + other => Ok(other), + } +} + +/// Validate and extract marker names from JSON directive values. +fn marker_name_from_value( + value: serde_json::Value, + context: &str, + path: &str, +) -> Result { + match value { + serde_json::Value::String(name) if !name.is_empty() => Ok(name), + serde_json::Value::String(_) => Err(InputError::MarkerNameCannotBeEmpty { + context: context.to_string(), + path: path.to_string(), + }), + other => Err(InputError::MarkerNameMustBeString { + context: context.to_string(), + path: path.to_string(), + actual: other, + }), + } +} + +/// Accept either direct LSP payload or marker-directive JSON for completion. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum CompletionResponseInput { + Full(CompletionResponse), + WithMarkers(serde_json::Value), +} + +impl CompletionResponseInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(response) => Ok(response), + Self::WithMarkers(value) => { + resolve_marker_json_input(value, marker_store, file, context) + } + } + } +} + +/// Accept either direct LSP payload or marker-directive JSON for document symbols. 
+#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum DocumentSymbolResponseInput { + Full(DocumentSymbolResponse), + WithMarkers(serde_json::Value), +} + +impl DocumentSymbolResponseInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(response) => Ok(response), + Self::WithMarkers(value) => { + resolve_marker_json_input(value, marker_store, file, context) + } + } + } +} + +/// Accept either direct LSP payload or marker-directive JSON for prepare-rename. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum PrepareRenameResponseInput { + Full(PrepareRenameResponse), + WithMarkers(serde_json::Value), +} + +impl PrepareRenameResponseInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(response) => Ok(response), + Self::WithMarkers(value) => { + resolve_marker_json_input(value, marker_store, file, context) + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectHoverScriptStep { + pub(super) request: String, + pub(super) result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectSignatureHelpScriptStep { + pub(super) request: String, + pub(super) result: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectFormattingScriptStep { + pub(super) request: String, + pub(super) result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectDocumentSymbolScriptStep { + pub(super) request: String, + pub(super) result: Option, +} + +impl ExpectDocumentSymbolScriptStep { + /// Resolve marker-aware expected result for `expectDocumentSymbol`. 
+ pub(super) fn resolve_result( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result, InputError> { + self.result + .map(|result| result.resolve(marker_store, file, context)) + .transpose() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectExecuteCommandScriptStep { + pub(super) request: String, + pub(super) result: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectPrepareRenameScriptStep { + pub(super) request: String, + pub(super) result: Option, +} + +impl ExpectPrepareRenameScriptStep { + /// Resolve marker-aware expected result for `expectPrepareRename`. + pub(super) fn resolve_result( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result, InputError> { + self.result + .map(|result| result.resolve(marker_store, file, context)) + .transpose() + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectCompletionScriptStep { + pub(super) request: String, + pub(super) result: Option, +} + +impl ExpectCompletionScriptStep { + /// Resolve marker-aware expected result for `expectCompletion`. + pub(super) fn resolve_result( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result, InputError> { + self.result + .map(|result| result.resolve(marker_store, file, context)) + .transpose() + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs new file mode 100644 index 00000000..adc95730 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs @@ -0,0 +1,1238 @@ +//! Inline marker parser and resolver for scenario source text. +//! +//! Marker syntax supported in file text: +//! - `[[name:text]]` records a named range covering `text`. +//! 
- `((name:before|after))` records a named cursor position between +//! `before` and `after` (stored as a zero-width range). +//! +//! Parsing is a two-step process: +//! 1. Build a small Rowan syntax tree for nested marker constructs. +//! 2. Lower that tree to plain text (marker wrappers removed) and a marker map +//! with character offsets into the plain text. +//! +//! `MarkerStore` keeps one parsed document per file and updates marker offsets +//! across full and incremental text changes during scenario compilation. + +use std::collections::HashMap; + +use lsp_types::{Position, Range}; +use rowan::{GreenNodeBuilder, Language, NodeOrToken}; +use serde::Deserialize; +use thiserror::Error; + +#[derive(Debug, Error)] +#[error("{context}: {kind}")] +pub struct MarkerError { + context: String, + kind: Box, +} + +#[derive(Debug, Error)] +enum MarkerErrorKind { + #[error("specify either `range` or shorthand (`at` + `text`/`len`), not both")] + ConflictingRangeAndShorthand, + #[error("marker range '{name}' requires file context")] + MarkerRangeRequiresFileContext { name: String }, + #[error("missing range, provide `range` or shorthand (`at` + `text`/`len`)")] + MissingRange, + #[error("shorthand cannot include both `text` and `len`")] + ShorthandTextAndLenBothSet, + #[error("shorthand requires one of `text` or `len`")] + ShorthandRequiresTextOrLen, + #[error("marker position '{name}' requires file context")] + MarkerPositionRequiresFileContext { name: String }, + #[error("missing position, provide `positionOf`")] + MissingPositionOf, + #[error("missing range, provide `rangeOf`")] + MissingRangeOf, + #[error("incremental range start is after end for file '{file}'")] + IncrementalRangeStartAfterEnd { file: String }, + #[error("{bound} offset out of bounds in '{file}'")] + FileOffsetOutOfBounds { file: String, bound: OffsetBound }, + #[error("incremental text width overflow for '{file}'")] + IncrementalTextWidthOverflow { file: String }, + #[error("duplicate marker name 
'{name}' in file '{file}'")] + DuplicateMarkerNameInFile { file: String, name: String }, + #[error("marker '{name}' start overflow")] + MarkerStartOverflow { name: String }, + #[error("marker '{name}' end overflow")] + MarkerEndOverflow { name: String }, + #[error("missing position, provide `at`")] + MissingPositionAt, + #[error("shorthand `text` length does not fit in u32")] + ShorthandTextLengthTooLong, + #[error("range end overflow")] + RangeEndOverflow, + #[error("no tracked text for file '{file}' while resolving marker '{marker_name}'")] + NoTrackedTextForMarker { file: String, marker_name: String }, + #[error("file '{file}' has no parsed text/markers; define it in `create`, `open`, `writeFile`, or `changeFull` first")] + FileHasNoParsedMarkers { file: String }, + #[error("unknown marker '{marker_name}' in file '{file}', available markers: {available:?}")] + UnknownMarker { + file: String, + marker_name: String, + available: Vec, + }, + #[error("text length overflow")] + TextLengthOverflow, + #[error("duplicate marker name '{name}' in one text block")] + DuplicateMarkerNameInTextBlock { name: String }, + #[error("unexpected marker syntax node {kind:?} while translating marker tree")] + UnexpectedSyntaxNode { kind: MarkerSyntaxKind }, + #[error("malformed range marker '{marker_name}', missing body")] + MalformedRangeMarkerMissingBody { marker_name: String }, + #[error("malformed cursor marker '{marker_name}', missing before segment")] + MalformedCursorMarkerMissingBefore { marker_name: String }, + #[error("malformed cursor marker '{marker_name}', missing after segment")] + MalformedCursorMarkerMissingAfter { marker_name: String }, + #[error("unexpected marker syntax token {kind:?} while translating marker tree")] + UnexpectedSyntaxToken { kind: MarkerSyntaxKind }, + #[error("malformed marker node, missing marker name")] + MalformedMarkerNodeMissingName, + #[error("marker parser index overflow")] + ParserIndexOverflow, + #[error("unterminated range marker 
starting at byte {start}")] + UnterminatedRangeMarker { start: usize }, + #[error("cursor marker '{marker_name}' must include exactly one top-level `|`")] + CursorMarkerMustIncludeOnePipe { marker_name: String }, + #[error("unterminated cursor marker starting at byte {start}")] + UnterminatedCursorMarker { start: usize }, + #[error("marker parser unexpectedly reached EOF")] + ParserUnexpectedEof, + #[error("marker name cannot be empty")] + MarkerNameEmpty, + #[error("offset overflow")] + OffsetOverflow, + #[error("line overflow")] + LineOverflow, + #[error("character overflow")] + CharacterOverflow, + #[error("marker offset {offset} is out of bounds (text has {text_chars} chars)")] + MarkerOffsetOutOfBounds { offset: usize, text_chars: usize }, + #[error("position ({line}, {character}) is out of bounds for current text")] + PositionOutOfBounds { line: u32, character: u32 }, + #[error("marker offset underflow while shifting")] + MarkerOffsetUnderflowWhileShifting, + #[error("marker shift overflow")] + MarkerShiftOverflow, + #[error("marker offset overflow while shifting")] + MarkerOffsetOverflowWhileShifting, +} + +#[derive(Debug, Clone, Copy)] +enum OffsetBound { + Start, + End, +} + +impl std::fmt::Display for OffsetBound { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Start => f.write_str("start"), + Self::End => f.write_str("end"), + } + } +} + +impl MarkerError { + fn new(context: impl Into, kind: MarkerErrorKind) -> Self { + Self { + context: context.into(), + kind: Box::new(kind), + } + } +} + +/// Position selector used by scenario script fields like `at`. +/// +/// Current DSL only supports named marker references. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +pub(super) enum PositionSpec { + Marker(String), +} + +/// Range selector used by scenario script fields like `range`. +/// +/// Current DSL only supports named marker references. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +pub(super) enum RangeSpec { + Marker(String), +} + +/// Range input shape used in request and expectation shorthand fields. +/// +/// Accepted forms: +/// - `range: ` +/// - shorthand using `at: ` with either `text` or `len` +#[derive(Debug, Clone, PartialEq, Eq, Default, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct RangeInput { + #[serde(default)] + pub(super) range: Option, + #[serde(default)] + pub(super) at: Option, + #[serde(default)] + pub(super) text: Option, + #[serde(default)] + pub(super) len: Option, +} + +impl RangeInput { + /// Resolve a range without file context. + /// + /// This is only valid for non-marker data. Marker references require file + /// context and must be resolved through [`MarkerStore`]. + pub(super) fn resolve_range(self, context: &str) -> Result { + if let Some(range_spec) = self.range { + if self.at.is_some() || self.text.is_some() || self.len.is_some() { + return Err(MarkerError::new( + context, + MarkerErrorKind::ConflictingRangeAndShorthand, + )); + } + return match range_spec { + RangeSpec::Marker(name) => Err(MarkerError::new( + context, + MarkerErrorKind::MarkerRangeRequiresFileContext { name }, + )), + }; + } + + let Some(start_spec) = self.at else { + return Err(MarkerError::new(context, MarkerErrorKind::MissingRange)); + }; + + match (self.text, self.len) { + (Some(_), Some(_)) => { + return Err(MarkerError::new( + context, + MarkerErrorKind::ShorthandTextAndLenBothSet, + )); + } + (None, None) => { + return Err(MarkerError::new( + context, + MarkerErrorKind::ShorthandRequiresTextOrLen, + )); + } + _ => {} + } + + match start_spec { + PositionSpec::Marker(name) => Err(MarkerError::new( + context, + MarkerErrorKind::MarkerPositionRequiresFileContext { name }, + )), + } + } +} + +/// Marker-backed position object field input (`positionOf`). 
+#[derive(Debug, Clone, PartialEq, Eq, Default, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct PositionFieldInput { + #[serde(default, rename = "positionOf")] + position_of: Option, +} + +impl PositionFieldInput { + /// Resolve `positionOf` against one file in the marker store. + pub(super) fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + self.position_of.map_or_else( + || { + Err(MarkerError::new( + context, + MarkerErrorKind::MissingPositionOf, + )) + }, + |name| marker_store.resolve_position_spec(file, PositionSpec::Marker(name), context), + ) + } +} + +/// Marker-backed range object field input (`rangeOf`). +#[derive(Debug, Clone, PartialEq, Eq, Default, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct RangeFieldInput { + #[serde(default, rename = "rangeOf")] + range_of: Option, +} + +impl RangeFieldInput { + /// Resolve `rangeOf` against one file in the marker store. + pub(super) fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + self.range_of.map_or_else( + || Err(MarkerError::new(context, MarkerErrorKind::MissingRangeOf)), + |marker| marker_store.resolve_named_range(file, &marker, context), + ) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct MarkerRangeOffsets { + start: usize, + end: usize, +} + +impl MarkerRangeOffsets { + const fn new(start: usize, end: usize) -> Self { + Self { start, end } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct MarkerDocument { + text: String, + markers: HashMap, +} + +#[derive(Debug, Default)] +pub(super) struct MarkerStore { + documents: HashMap, +} + +impl MarkerStore { + /// Create an empty marker store. + pub(super) fn new() -> Self { + Self { + documents: HashMap::new(), + } + } + + /// Parse and register full file text with inline markers. + /// + /// Returns cleaned text (marker wrappers removed) that should be sent to the + /// LSP server for this file. 
+ pub(super) fn register_full_text( + &mut self, + file: &str, + raw_text: String, + context: &str, + ) -> Result { + let (text, markers) = parse_marked_text(&raw_text, context)?; + self.documents.insert( + file.to_string(), + MarkerDocument { + text: text.clone(), + markers, + }, + ); + Ok(text) + } + + /// Apply an incremental edit and update tracked marker offsets. + /// + /// Existing markers before the replaced range are retained as-is. Markers + /// after the range are shifted by the edit delta. Markers overlapping the + /// replaced region are dropped. Markers in inserted text are parsed and + /// inserted at their shifted positions. + pub(super) fn register_incremental_text( + &mut self, + file: &str, + range: Range, + raw_text: String, + context: &str, + ) -> Result { + let (text, inserted_markers) = parse_marked_text(&raw_text, context)?; + let Some(document) = self.documents.get_mut(file) else { + return Ok(text); + }; + + let start_offset = position_to_offset(&document.text, range.start, context)?; + let end_offset = position_to_offset(&document.text, range.end, context)?; + if start_offset > end_offset { + return Err(MarkerError::new( + context, + MarkerErrorKind::IncrementalRangeStartAfterEnd { + file: file.to_string(), + }, + )); + } + + let start_byte = + char_offset_to_byte_offset(&document.text, start_offset).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::FileOffsetOutOfBounds { + file: file.to_string(), + bound: OffsetBound::Start, + }, + ) + })?; + let end_byte = char_offset_to_byte_offset(&document.text, end_offset).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::FileOffsetOutOfBounds { + file: file.to_string(), + bound: OffsetBound::End, + }, + ) + })?; + + let mut next_text = String::with_capacity( + start_byte + text.len() + document.text.len().saturating_sub(end_byte), + ); + next_text.push_str(&document.text[..start_byte]); + next_text.push_str(&text); + next_text.push_str(&document.text[end_byte..]); + 
+ let replaced_width = end_offset.saturating_sub(start_offset); + let inserted_width = text.chars().count(); + let delta = isize::try_from(inserted_width) + .and_then(|inserted| { + isize::try_from(replaced_width).map(|replaced| inserted - replaced) + }) + .map_err(|_| { + MarkerError::new( + context, + MarkerErrorKind::IncrementalTextWidthOverflow { + file: file.to_string(), + }, + ) + })?; + + let mut next_markers = + HashMap::with_capacity(document.markers.len() + inserted_markers.len()); + for (name, marker) in &document.markers { + if marker.end <= start_offset { + next_markers.insert(name.clone(), marker.clone()); + continue; + } + if marker.start >= end_offset { + let shifted_start = shift_offset(marker.start, delta, context)?; + let shifted_end = shift_offset(marker.end, delta, context)?; + next_markers.insert( + name.clone(), + MarkerRangeOffsets::new(shifted_start, shifted_end), + ); + } + } + + for (name, marker) in inserted_markers { + if next_markers.contains_key(&name) { + return Err(MarkerError::new( + context, + MarkerErrorKind::DuplicateMarkerNameInFile { + file: file.to_string(), + name, + }, + )); + } + let start = marker.start.checked_add(start_offset).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::MarkerStartOverflow { name: name.clone() }, + ) + })?; + let end = marker.end.checked_add(start_offset).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::MarkerEndOverflow { name: name.clone() }, + ) + })?; + next_markers.insert(name, MarkerRangeOffsets::new(start, end)); + } + + document.text = next_text; + document.markers = next_markers; + Ok(text) + } + + /// Resolve an optional `at` field into a concrete LSP position. 
+ pub(super) fn resolve_position( + &self, + file: &str, + at: Option, + context: &str, + ) -> Result { + at.map_or_else( + || { + Err(MarkerError::new( + context, + MarkerErrorKind::MissingPositionAt, + )) + }, + |spec| self.resolve_position_spec(file, spec, context), + ) + } + + /// Resolve a marker-aware range input for one file. + pub(super) fn resolve_range( + &self, + file: &str, + input: RangeInput, + context: &str, + ) -> Result { + if let Some(range_spec) = input.range { + if input.at.is_some() || input.text.is_some() || input.len.is_some() { + return Err(MarkerError::new( + context, + MarkerErrorKind::ConflictingRangeAndShorthand, + )); + } + return match range_spec { + RangeSpec::Marker(name) => self.resolve_named_range(file, &name, context), + }; + } + + let Some(start_spec) = input.at else { + return Err(MarkerError::new(context, MarkerErrorKind::MissingRange)); + }; + + match (input.text, input.len) { + (Some(text), None) => { + let width = u32::try_from(text.chars().count()).map_err(|_| { + MarkerError::new(context, MarkerErrorKind::ShorthandTextLengthTooLong) + })?; + let start = self.resolve_position_spec(file, start_spec, context)?; + let end_character = start + .character + .checked_add(width) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::RangeEndOverflow))?; + Ok(Range { + start, + end: Position { + line: start.line, + character: end_character, + }, + }) + } + (None, Some(len)) => { + let start = self.resolve_position_spec(file, start_spec, context)?; + let end_character = start + .character + .checked_add(len) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::RangeEndOverflow))?; + Ok(Range { + start, + end: Position { + line: start.line, + character: end_character, + }, + }) + } + (Some(_), Some(_)) => Err(MarkerError::new( + context, + MarkerErrorKind::ShorthandTextAndLenBothSet, + )), + (None, None) => match start_spec { + PositionSpec::Marker(name) => self.resolve_named_range(file, &name, context), + }, + } + } + + 
/// Resolve a named marker into an LSP range. + pub(super) fn resolve_named_range( + &self, + file: &str, + marker_name: &str, + context: &str, + ) -> Result { + let marker = self.lookup_marker(file, marker_name, context)?; + let text = self.full_text(file).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::NoTrackedTextForMarker { + file: file.to_string(), + marker_name: marker_name.to_string(), + }, + ) + })?; + let start = offset_to_position(text, marker.start, context)?; + let end = offset_to_position(text, marker.end, context)?; + Ok(Range { start, end }) + } + + /// Resolve a position selector into an LSP position. + pub(super) fn resolve_position_spec( + &self, + file: &str, + spec: PositionSpec, + context: &str, + ) -> Result { + match spec { + PositionSpec::Marker(name) => { + let marker = self.lookup_marker(file, &name, context)?; + let text = self.full_text(file).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::NoTrackedTextForMarker { + file: file.to_string(), + marker_name: name.clone(), + }, + ) + })?; + offset_to_position(text, marker.start, context) + } + } + } + + /// Look up one marker range in one tracked file. + fn lookup_marker( + &self, + file: &str, + marker_name: &str, + context: &str, + ) -> Result<&MarkerRangeOffsets, MarkerError> { + let Some(document) = self.documents.get(file) else { + return Err(MarkerError::new( + context, + MarkerErrorKind::FileHasNoParsedMarkers { + file: file.to_string(), + }, + )); + }; + + document.markers.get(marker_name).ok_or_else(|| { + let available = document.markers.keys().cloned().collect::>(); + MarkerError::new( + context, + MarkerErrorKind::UnknownMarker { + file: file.to_string(), + marker_name: marker_name.to_string(), + available, + }, + ) + }) + } + + /// Return current cleaned text for a file, if tracked. 
+ pub(super) fn full_text(&self, file: &str) -> Option<&str> { + self.documents + .get(file) + .map(|document| document.text.as_str()) + } + + /// Drop all tracked text/marker state for a file. + pub(super) fn remove(&mut self, file: &str) { + self.documents.remove(file); + } +} + +/// Parse inline marker syntax and return `(clean_text, marker_offsets)`. +fn parse_marked_text( + input: &str, + context: &str, +) -> Result<(String, HashMap), MarkerError> { + let root = MarkerSyntaxParser::new(input, context).parse()?; + let mut segment = ParsedSegment::default(); + append_marker_node_contents(&mut segment, &root, context)?; + Ok((segment.text, segment.markers)) +} + +#[derive(Debug, Default)] +struct ParsedSegment { + text: String, + char_len: usize, + markers: HashMap, +} + +impl ParsedSegment { + /// Append plain text and advance tracked character length. + fn push_text(&mut self, text: &str, context: &str) -> Result<(), MarkerError> { + self.text.push_str(text); + self.char_len = self + .char_len + .checked_add(text.chars().count()) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::TextLengthOverflow))?; + Ok(()) + } + + /// Insert a unique named marker range for this parsed segment. 
+ fn insert_marker( + &mut self, + name: String, + start: usize, + end: usize, + context: &str, + ) -> Result<(), MarkerError> { + if self.markers.contains_key(&name) { + return Err(MarkerError::new( + context, + MarkerErrorKind::DuplicateMarkerNameInTextBlock { name }, + )); + } + self.markers + .insert(name, MarkerRangeOffsets::new(start, end)); + Ok(()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[repr(u16)] +enum MarkerSyntaxKind { + Root, + RangeMarker, + CursorMarker, + RangeBody, + CursorBefore, + CursorAfter, + Text, + MarkerName, + OpenRange, + CloseRange, + OpenCursor, + CloseCursor, + Colon, + Pipe, + Error, +} + +impl MarkerSyntaxKind { + const fn into_raw(self) -> u16 { + self as u16 + } + + const fn from_raw(raw: u16) -> Self { + match raw { + 0 => Self::Root, + 1 => Self::RangeMarker, + 2 => Self::CursorMarker, + 3 => Self::RangeBody, + 4 => Self::CursorBefore, + 5 => Self::CursorAfter, + 6 => Self::Text, + 7 => Self::MarkerName, + 8 => Self::OpenRange, + 9 => Self::CloseRange, + 10 => Self::OpenCursor, + 11 => Self::CloseCursor, + 12 => Self::Colon, + 13 => Self::Pipe, + _ => Self::Error, + } + } +} + +impl From for rowan::SyntaxKind { + fn from(value: MarkerSyntaxKind) -> Self { + Self(value.into_raw()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +enum MarkerLanguage {} + +impl Language for MarkerLanguage { + type Kind = MarkerSyntaxKind; + + fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind { + MarkerSyntaxKind::from_raw(raw.0) + } + + fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind { + kind.into() + } +} + +type MarkerSyntaxNode = rowan::SyntaxNode; +type MarkerSyntaxToken = rowan::SyntaxToken; + +/// Append translated content of all children from one syntax node. 
+fn append_marker_node_contents( + target: &mut ParsedSegment, + node: &MarkerSyntaxNode, + context: &str, +) -> Result<(), MarkerError> { + for child in node.children_with_tokens() { + match child { + NodeOrToken::Node(child_node) => append_marker_node(target, &child_node, context)?, + NodeOrToken::Token(child_token) => append_marker_token(target, &child_token, context)?, + } + } + Ok(()) +} + +/// Translate one syntax node into output text + marker ranges. +fn append_marker_node( + target: &mut ParsedSegment, + node: &MarkerSyntaxNode, + context: &str, +) -> Result<(), MarkerError> { + match node.kind() { + MarkerSyntaxKind::Root + | MarkerSyntaxKind::RangeBody + | MarkerSyntaxKind::CursorBefore + | MarkerSyntaxKind::CursorAfter => append_marker_node_contents(target, node, context), + MarkerSyntaxKind::RangeMarker => append_range_marker(target, node, context), + MarkerSyntaxKind::CursorMarker => append_cursor_marker(target, node, context), + kind => Err(MarkerError::new( + context, + MarkerErrorKind::UnexpectedSyntaxNode { kind }, + )), + } +} + +/// Translate a parsed range marker node and record its start/end character offsets. +fn append_range_marker( + target: &mut ParsedSegment, + node: &MarkerSyntaxNode, + context: &str, +) -> Result<(), MarkerError> { + let marker_name = marker_name_for_node(node, context)?; + let marker_start = target.char_len; + let mut body_seen = false; + + for child in node.children() { + if child.kind() == MarkerSyntaxKind::RangeBody { + body_seen = true; + append_marker_node_contents(target, &child, context)?; + } + } + + if !body_seen { + return Err(MarkerError::new( + context, + MarkerErrorKind::MalformedRangeMarkerMissingBody { marker_name }, + )); + } + + let marker_end = target.char_len; + target.insert_marker(marker_name, marker_start, marker_end, context) +} + +/// Translate a parsed cursor marker node and record a zero-width marker range. 
+fn append_cursor_marker( + target: &mut ParsedSegment, + node: &MarkerSyntaxNode, + context: &str, +) -> Result<(), MarkerError> { + let marker_name = marker_name_for_node(node, context)?; + let mut before = None::; + let mut after = None::; + + for child in node.children() { + match child.kind() { + MarkerSyntaxKind::CursorBefore => before = Some(child), + MarkerSyntaxKind::CursorAfter => after = Some(child), + _ => {} + } + } + + let before = if let Some(before) = before { + before + } else { + return Err(MarkerError::new( + context, + MarkerErrorKind::MalformedCursorMarkerMissingBefore { marker_name }, + )); + }; + let after = if let Some(after) = after { + after + } else { + return Err(MarkerError::new( + context, + MarkerErrorKind::MalformedCursorMarkerMissingAfter { marker_name }, + )); + }; + + append_marker_node_contents(target, &before, context)?; + let cursor = target.char_len; + append_marker_node_contents(target, &after, context)?; + target.insert_marker(marker_name, cursor, cursor, context) +} + +/// Translate one token from marker syntax tree into plain text stream. +fn append_marker_token( + target: &mut ParsedSegment, + token: &MarkerSyntaxToken, + context: &str, +) -> Result<(), MarkerError> { + match token.kind() { + MarkerSyntaxKind::Text => target.push_text(token.text(), context), + MarkerSyntaxKind::MarkerName + | MarkerSyntaxKind::OpenRange + | MarkerSyntaxKind::CloseRange + | MarkerSyntaxKind::OpenCursor + | MarkerSyntaxKind::CloseCursor + | MarkerSyntaxKind::Colon + | MarkerSyntaxKind::Pipe => Ok(()), + kind => Err(MarkerError::new( + context, + MarkerErrorKind::UnexpectedSyntaxToken { kind }, + )), + } +} + +/// Extract marker name token from a marker node. 
+fn marker_name_for_node(node: &MarkerSyntaxNode, context: &str) -> Result { + node.children_with_tokens() + .find_map(|element| match element { + NodeOrToken::Node(_) => None, + NodeOrToken::Token(token) => { + (token.kind() == MarkerSyntaxKind::MarkerName).then_some(token) + } + }) + .map(|token| token.text().to_string()) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::MalformedMarkerNodeMissingName)) +} + +/// Recursive-descent parser that builds a Rowan tree for marker syntax. +struct MarkerSyntaxParser<'a> { + input: &'a str, + index: usize, + context: &'a str, + builder: GreenNodeBuilder<'static>, +} + +impl<'a> MarkerSyntaxParser<'a> { + /// Create a parser for one raw text block. + fn new(input: &'a str, context: &'a str) -> Self { + Self { + input, + index: 0, + context, + builder: GreenNodeBuilder::new(), + } + } + + /// Parse full input into a syntax tree rooted at `Root`. + fn parse(mut self) -> Result { + self.start_node(MarkerSyntaxKind::Root); + while !self.is_eof() { + self.parse_item()?; + } + self.finish_node(); + Ok(MarkerSyntaxNode::new_root(self.builder.finish())) + } + + /// Parse one top-level item, preferring marker constructs over raw text. + fn parse_item(&mut self) -> Result<(), MarkerError> { + if self.try_parse_range_marker()? { + return Ok(()); + } + if self.try_parse_cursor_marker()? { + return Ok(()); + } + + self.parse_text_token() + } + + /// Attempt to parse `[[name:...]]`. + /// + /// Returns `Ok(false)` if current cursor is not at a valid range marker. + fn try_parse_range_marker(&mut self) -> Result { + if !self.starts_with("[[") { + return Ok(false); + } + + let start = self.index; + let Some((name_width, marker_name)) = + self.peek_marker_name_and_colon(start.checked_add(2).ok_or_else(|| { + MarkerError::new(self.context, MarkerErrorKind::ParserIndexOverflow) + })?) 
+ else { + return Ok(false); + }; + let marker_name = parse_marker_name(marker_name, self.context)?; + + self.start_node(MarkerSyntaxKind::RangeMarker); + self.emit_fixed_token(MarkerSyntaxKind::OpenRange, "[[")?; + self.emit_text_token(MarkerSyntaxKind::MarkerName, marker_name); + self.advance_bytes(name_width)?; + self.emit_fixed_token(MarkerSyntaxKind::Colon, ":")?; + self.start_node(MarkerSyntaxKind::RangeBody); + while !self.is_eof() { + if self.starts_with("]]") { + self.finish_node(); + self.emit_fixed_token(MarkerSyntaxKind::CloseRange, "]]")?; + self.finish_node(); + return Ok(true); + } + self.parse_item()?; + } + + Err(MarkerError::new( + self.context, + MarkerErrorKind::UnterminatedRangeMarker { start }, + )) + } + + /// Attempt to parse `((name:before|after))`. + /// + /// Cursor markers must contain exactly one top-level `|`. + /// Returns `Ok(false)` if current cursor is not at a valid cursor marker. + fn try_parse_cursor_marker(&mut self) -> Result { + if !self.starts_with("((") { + return Ok(false); + } + + let start = self.index; + let Some((name_width, marker_name)) = + self.peek_marker_name_and_colon(start.checked_add(2).ok_or_else(|| { + MarkerError::new(self.context, MarkerErrorKind::ParserIndexOverflow) + })?) 
+ else { + return Ok(false); + }; + let marker_name = parse_marker_name(marker_name, self.context)?; + + self.start_node(MarkerSyntaxKind::CursorMarker); + self.emit_fixed_token(MarkerSyntaxKind::OpenCursor, "((")?; + self.emit_text_token(MarkerSyntaxKind::MarkerName, marker_name); + self.advance_bytes(name_width)?; + self.emit_fixed_token(MarkerSyntaxKind::Colon, ":")?; + + self.start_node(MarkerSyntaxKind::CursorBefore); + while !self.is_eof() { + if self.starts_with("))") { + return Err(MarkerError::new( + self.context, + MarkerErrorKind::CursorMarkerMustIncludeOnePipe { + marker_name: marker_name.to_string(), + }, + )); + } + if self.peek_char() == Some('|') { + self.finish_node(); + self.emit_fixed_token(MarkerSyntaxKind::Pipe, "|")?; + self.start_node(MarkerSyntaxKind::CursorAfter); + break; + } + self.parse_item()?; + } + + if self.is_eof() { + return Err(MarkerError::new( + self.context, + MarkerErrorKind::UnterminatedCursorMarker { start }, + )); + } + + while !self.is_eof() { + if self.starts_with("))") { + self.finish_node(); + self.emit_fixed_token(MarkerSyntaxKind::CloseCursor, "))")?; + self.finish_node(); + return Ok(true); + } + if self.peek_char() == Some('|') { + return Err(MarkerError::new( + self.context, + MarkerErrorKind::CursorMarkerMustIncludeOnePipe { + marker_name: marker_name.to_string(), + }, + )); + } + self.parse_item()?; + } + + Err(MarkerError::new( + self.context, + MarkerErrorKind::UnterminatedCursorMarker { start }, + )) + } + + /// Emit one non-marker UTF-8 scalar as plain text. 
+ fn parse_text_token(&mut self) -> Result<(), MarkerError> { + let ch = self + .peek_char() + .ok_or_else(|| MarkerError::new(self.context, MarkerErrorKind::ParserUnexpectedEof))?; + let width = ch.len_utf8(); + let end = self + .index + .checked_add(width) + .ok_or_else(|| MarkerError::new(self.context, MarkerErrorKind::ParserIndexOverflow))?; + let text = &self.input[self.index..end]; + self.emit_text_token(MarkerSyntaxKind::Text, text); + self.index = end; + Ok(()) + } + + /// Look ahead for `:` from `start`. + /// + /// Returns consumed byte width and marker name slice if valid. + fn peek_marker_name_and_colon(&self, start: usize) -> Option<(usize, &'a str)> { + let tail = self.input.get(start..)?; + let mut chars = tail.char_indices(); + let (_, first) = chars.next()?; + if !is_marker_name_start(first) { + return None; + } + + for (offset, ch) in chars { + if is_marker_name_continue(ch) { + continue; + } + if ch == ':' { + let name = &tail[..offset]; + return Some((offset, name)); + } + return None; + } + + None + } + + fn start_node(&mut self, kind: MarkerSyntaxKind) { + self.builder.start_node(kind.into()); + } + + fn finish_node(&mut self) { + self.builder.finish_node(); + } + + fn emit_text_token(&mut self, kind: MarkerSyntaxKind, text: &str) { + self.builder.token(kind.into(), text); + } + + fn emit_fixed_token(&mut self, kind: MarkerSyntaxKind, text: &str) -> Result<(), MarkerError> { + self.emit_text_token(kind, text); + self.advance_bytes(text.len()) + } + + fn starts_with(&self, token: &str) -> bool { + self.input[self.index..].starts_with(token) + } + + fn peek_char(&self) -> Option { + self.input[self.index..].chars().next() + } + + fn advance_bytes(&mut self, bytes: usize) -> Result<(), MarkerError> { + self.index = self + .index + .checked_add(bytes) + .ok_or_else(|| MarkerError::new(self.context, MarkerErrorKind::ParserIndexOverflow))?; + Ok(()) + } + + fn is_eof(&self) -> bool { + self.index >= self.input.len() + } +} + +const fn 
is_marker_name_start(ch: char) -> bool { + ch.is_ascii_alphabetic() || ch == '_' +} + +const fn is_marker_name_continue(ch: char) -> bool { + ch.is_ascii_alphanumeric() || ch == '_' || ch == '-' +} + +/// Validate marker name lexical constraints. +fn parse_marker_name<'a>(name: &'a str, context: &str) -> Result<&'a str, MarkerError> { + if name.is_empty() { + return Err(MarkerError::new(context, MarkerErrorKind::MarkerNameEmpty)); + } + Ok(name) +} + +/// Convert a character offset into an LSP `(line, character)` position. +fn offset_to_position(text: &str, offset: usize, context: &str) -> Result { + let mut line = 0u32; + let mut character = 0u32; + let mut consumed = 0usize; + + for ch in text.chars() { + if consumed == offset { + return Ok(Position { line, character }); + } + consumed = consumed + .checked_add(1) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::OffsetOverflow))?; + if ch == '\n' { + line = line + .checked_add(1) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::LineOverflow))?; + character = 0; + } else { + character = character + .checked_add(1) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::CharacterOverflow))?; + } + } + + if consumed == offset { + return Ok(Position { line, character }); + } + + Err(MarkerError::new( + context, + MarkerErrorKind::MarkerOffsetOutOfBounds { + offset, + text_chars: consumed, + }, + )) +} + +/// Convert an LSP `(line, character)` position into a character offset. 
+fn position_to_offset(text: &str, position: Position, context: &str) -> Result { + let mut line = 0u32; + let mut character = 0u32; + let mut offset = 0usize; + + for ch in text.chars() { + if line == position.line && character == position.character { + return Ok(offset); + } + offset = offset + .checked_add(1) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::OffsetOverflow))?; + if ch == '\n' { + line = line + .checked_add(1) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::LineOverflow))?; + character = 0; + } else { + character = character + .checked_add(1) + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::CharacterOverflow))?; + } + } + + if line == position.line && character == position.character { + return Ok(offset); + } + + Err(MarkerError::new( + context, + MarkerErrorKind::PositionOutOfBounds { + line: position.line, + character: position.character, + }, + )) +} + +/// Convert a character offset to byte offset in UTF-8 text. +fn char_offset_to_byte_offset(text: &str, target: usize) -> Option { + let mut offset = 0usize; + for (byte_index, _) in text.char_indices() { + if offset == target { + return Some(byte_index); + } + offset = offset.checked_add(1)?; + } + if offset == target { + return Some(text.len()); + } + None +} + +/// Shift an offset by signed delta with overflow/underflow checks. 
+fn shift_offset(value: usize, delta: isize, context: &str) -> Result { + if delta.is_negative() { + let amount = delta.unsigned_abs(); + value.checked_sub(amount).ok_or_else(|| { + MarkerError::new(context, MarkerErrorKind::MarkerOffsetUnderflowWhileShifting) + }) + } else { + let amount = usize::try_from(delta) + .map_err(|_| MarkerError::new(context, MarkerErrorKind::MarkerShiftOverflow))?; + value.checked_add(amount).ok_or_else(|| { + MarkerError::new(context, MarkerErrorKind::MarkerOffsetOverflowWhileShifting) + }) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/mod.rs new file mode 100644 index 00000000..0c2b52ed --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/mod.rs @@ -0,0 +1,20 @@ +//! YAML scenario script parsing and compilation pipeline. +//! +//! This module is split into focused internal stages: +//! - [`parse`]: YAML deserialization boundary and top-level error type. +//! - [`compile`]: converts script steps into executable [`crate::scenario::ScenarioStep`] values. +//! - [`markers`]: inline marker parsing (`[[name:text]]`, `((name:before|after))`) and resolution. +//! - [`inputs`]: typed shorthand decoders for LSP payloads used by expect steps. +//! - [`registry`]: request alias/id tracking so expects match the intended requests. +//! - [`paths`]: base-dir-relative path and file URI helpers. +//! +//! External callers should use [`parse_scenario_yaml`]. + +mod compile; +mod inputs; +mod markers; +mod parse; +mod paths; +mod registry; + +pub use parse::{parse_scenario_yaml, ParseScenarioError}; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs new file mode 100644 index 00000000..d49f7593 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs @@ -0,0 +1,152 @@ +//! Public entry point for parsing scenario YAML. +//! +//! 
`parse_scenario_yaml` does two explicit stages: +//! 1. Deserialize YAML into an internal script model (`ScenarioScript`). +//! 2. Compile that model into executable, strongly typed [`crate::scenario::Scenario`] data. +//! +//! The DSL is tuned for integration/e2e readability: +//! - file paths are relative to a caller-provided base directory +//! - `create.files` lets scenarios define their own workspace contents +//! - requests use optional aliases (`as`) instead of numeric IDs +//! - markers allow readable position/range references: +//! - `[[name:text]]` for named ranges +//! - `((name:before|after))` for named cursor positions +//! - expected payloads can reference markers via `positionOf` / `rangeOf` +//! +//! YAML parse errors and compile errors are separated in [`ParseScenarioError`]. + +use std::path::Path; + +use thiserror::Error; + +use super::compile::{CompileScenarioError, ScenarioScript}; + +/// Errors returned by [`parse_scenario_yaml`]. +#[derive(Debug, Error)] +pub enum ParseScenarioError { + /// The input was not valid scenario YAML. + #[error("parse scenario yaml: {source}")] + ParseYaml { + #[source] + source: serde_yaml_with_quirks::Error, + }, + /// YAML parsed, but failed semantic compilation to executable steps. + #[error(transparent)] + Compile(#[from] CompileScenarioError), +} + +/// Parse YAML scenario script and compile it to an executable scenario. +/// +/// `base_dir` is used to resolve relative `file`/`path` entries into absolute +/// on-disk paths and `file://` URIs inside the compiled scenario model. 
+pub fn parse_scenario_yaml( + input: &str, + base_dir: &Path, +) -> Result { + let parsed: ScenarioScript = serde_yaml_with_quirks::from_str(input) + .map_err(|source| ParseScenarioError::ParseYaml { source })?; + parsed.compile(base_dir).map_err(Into::into) +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use assert_matches::assert_matches; + use jrsonnet_lsp_types::SemanticTokenTypeName; + + use super::{parse_scenario_yaml, ParseScenarioError}; + use crate::{ + scenario::{ + ExpectSemanticTokensFullStep, OpenStep, RequestSemanticTokensFullStep, Scenario, + ScenarioStep, WriteFileStep, + }, + semantic_tokens::{encode_semantic_tokens, semantic_modifiers, ExpectedSemanticToken}, + }; + + fn main_file(tmp: &tempfile::TempDir) -> PathBuf { + tmp.path().join("main.jsonnet") + } + + fn main_uri(tmp: &tempfile::TempDir) -> String { + format!("file://{}", main_file(tmp).to_string_lossy()) + } + + #[test] + fn parse_compiles_tokens_by_marker_to_semantic_tokens() { + let tmp = tempfile::tempdir().expect("tempdir"); + let script = r" +steps: +- step: create + files: + main.jsonnet: |- + local [[nameTok:name]] = 1; + name + open: [main.jsonnet] +- step: requestSemanticTokensFull + as: tokens + file: main.jsonnet +- step: expectSemanticTokensFull + request: tokens + result: + tokensByMarker: + - marker: nameTok + type: variable +"; + let actual = parse_scenario_yaml(script, tmp.path()).expect("parse should succeed"); + let uri = main_uri(&tmp); + let expected = Scenario::new(vec![ + ScenarioStep::WriteFile(WriteFileStep { + path: main_file(&tmp).to_string_lossy().into_owned(), + text: "local name = 1;\nname".to_string(), + }), + ScenarioStep::Open(OpenStep { + uri: uri.clone(), + text: "local name = 1;\nname".to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::RequestSemanticTokensFull(RequestSemanticTokensFullStep { id: 1, uri }), + ScenarioStep::ExpectSemanticTokensFull(ExpectSemanticTokensFullStep { + id: 1, + result: 
Some(lsp_types::SemanticTokensResult::Tokens( + encode_semantic_tokens(vec![ExpectedSemanticToken::new( + 0, + 6, + 4, + SemanticTokenTypeName::Variable, + semantic_modifiers(&[]), + )]), + )), + }), + ]); + assert_eq!(actual, expected); + } + + #[test] + fn parse_rejects_absolute_semantic_token_yaml_input() { + let tmp = tempfile::tempdir().expect("tempdir"); + let script = r" +steps: +- step: create + files: + main.jsonnet: |- + local [[nameTok:name]] = 1; + name + open: [main.jsonnet] +- step: requestSemanticTokensFull + as: tokens + file: main.jsonnet +- step: expectSemanticTokensFull + request: tokens + result: + tokens: + - line: 0 + start: 6 + len: 4 + type: variable +"; + let error = parse_scenario_yaml(script, tmp.path()).expect_err("parse should fail"); + assert_matches!(error, ParseScenarioError::ParseYaml { .. }); + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/paths.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/paths.rs new file mode 100644 index 00000000..92c4c12d --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/paths.rs @@ -0,0 +1,28 @@ +//! Path helpers for scenario compilation. +//! +//! Scenario YAML always expresses file locations relative to a test-specific +//! base directory. These helpers centralize conversion to absolute filesystem +//! paths and `file://` URI strings. + +use std::path::{Path, PathBuf}; + +/// Resolve a scenario-relative path to an absolute OS path string. +pub(super) fn file_path(base_dir: &Path, relative_path: &str) -> String { + resolve_path(base_dir, relative_path) + .to_string_lossy() + .into_owned() +} + +/// Resolve a scenario-relative path to an absolute `file://` URI string. +/// +/// This intentionally performs no canonicalization or URL escaping; callers use +/// this for temporary test workspaces with already-controlled relative paths. 
+pub(super) fn file_uri(base_dir: &Path, relative_path: &str) -> String { + let absolute_path = resolve_path(base_dir, relative_path); + format!("file://{}", absolute_path.to_string_lossy()) +} + +/// Join `base_dir` with a relative scenario path. +fn resolve_path(base_dir: &Path, relative_path: &str) -> PathBuf { + base_dir.join(relative_path) +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs new file mode 100644 index 00000000..ff28fd1b --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs @@ -0,0 +1,199 @@ +//! Request alias and ID bookkeeping for scenario compilation. +//! +//! Scenario scripts can name requests with `as: some_alias` and later refer to +//! them from `expect*` steps using `request: some_alias`. This registry maps +//! aliases to generated request IDs and tracks unmatched requests by kind. + +use std::{ + collections::{HashMap, VecDeque}, + fmt, +}; + +use thiserror::Error; + +/// LSP request kinds supported by the scenario DSL. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum RequestKind { + CodeAction, + References, + Definition, + Declaration, + TypeDefinition, + PrepareRename, + Rename, + Hover, + SignatureHelp, + Completion, + Formatting, + RangeFormatting, + SemanticTokensFull, + SemanticTokensRange, + InlayHints, + DocumentSymbol, + WorkspaceSymbol, + CodeLens, + ExecuteCommand, + ExecuteCodeLens, + Custom, +} + +impl fmt::Display for RequestKind { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str(self.label()) + } +} + +#[derive(Debug, Error)] +pub enum RequestRegistryError { + #[error("request id overflow")] + RequestIdOverflow, + #[error("duplicate request alias '{name}', request aliases must be unique")] + DuplicateAlias { name: String }, + #[error("unknown request alias '{name}' for {kind}, define it with `as`")] + UnknownAlias { name: String, kind: RequestKind }, + #[error("request alias '{name}' has kind {alias_kind}, cannot match {requested_kind}")] + AliasKindMismatch { + name: String, + alias_kind: RequestKind, + requested_kind: RequestKind, + }, + #[error("request alias '{name}' for {kind} was already matched")] + AliasAlreadyMatched { name: String, kind: RequestKind }, + #[error("failed to claim queued request alias '{name}' for {kind}")] + ClaimQueueCorrupted { name: String, kind: RequestKind }, +} + +impl RequestKind { + /// Canonical DSL step label for diagnostics and error messages. 
+ const fn label(self) -> &'static str { + match self { + Self::CodeAction => "requestCodeAction", + Self::References => "requestReferences", + Self::Definition => "requestDefinition", + Self::Declaration => "requestDeclaration", + Self::TypeDefinition => "requestTypeDefinition", + Self::PrepareRename => "requestPrepareRename", + Self::Rename => "requestRename", + Self::Hover => "requestHover", + Self::SignatureHelp => "requestSignatureHelp", + Self::Completion => "requestCompletion", + Self::Formatting => "requestFormatting", + Self::RangeFormatting => "requestRangeFormatting", + Self::SemanticTokensFull => "requestSemanticTokensFull", + Self::SemanticTokensRange => "requestSemanticTokensRange", + Self::InlayHints => "requestInlayHints", + Self::DocumentSymbol => "requestDocumentSymbol", + Self::WorkspaceSymbol => "requestWorkspaceSymbol", + Self::CodeLens => "requestCodeLens", + Self::ExecuteCommand => "requestExecuteCommand", + Self::ExecuteCodeLens => "requestExecuteCodeLens", + Self::Custom => "requestCustom", + } + } +} + +/// Tracks pending request IDs and named aliases while compiling one scenario. +#[derive(Debug)] +pub(super) struct RequestRegistry { + next_id: i32, + named: HashMap, + pending: HashMap>, +} + +impl RequestRegistry { + /// Create an empty request registry with IDs starting at `1`. + pub(super) fn new() -> Self { + Self { + next_id: 1, + named: HashMap::new(), + pending: HashMap::new(), + } + } + + /// Allocate a new request ID and optionally bind a unique alias. + /// + /// The new ID is always queued under `kind` so alias-based claims can enforce + /// one-to-one request/expect matching. 
+ pub(super) fn allocate( + &mut self, + kind: RequestKind, + name: Option, + ) -> Result { + let id = self.next_id; + self.next_id = self + .next_id + .checked_add(1) + .ok_or(RequestRegistryError::RequestIdOverflow)?; + + if let Some(name) = name { + if self.named.contains_key(&name) { + return Err(RequestRegistryError::DuplicateAlias { name }); + } + self.named.insert(name, (kind, id)); + } + + self.pending.entry(kind).or_default().push_back(id); + Ok(id) + } + + /// Claim a pending request ID for `kind` by explicit alias. + pub(super) fn claim( + &mut self, + kind: RequestKind, + name: &str, + ) -> Result { + let (named_kind, id) = + self.named + .get(name) + .copied() + .ok_or_else(|| RequestRegistryError::UnknownAlias { + name: name.to_string(), + kind, + })?; + if named_kind != kind { + return Err(RequestRegistryError::AliasKindMismatch { + name: name.to_string(), + alias_kind: named_kind, + requested_kind: kind, + }); + } + let queue = self.pending.entry(kind).or_default(); + let Some(index) = queue.iter().position(|candidate| *candidate == id) else { + return Err(RequestRegistryError::AliasAlreadyMatched { + name: name.to_string(), + kind, + }); + }; + let Some(claimed) = queue.remove(index) else { + return Err(RequestRegistryError::ClaimQueueCorrupted { + name: name.to_string(), + kind, + }); + }; + Ok(claimed) + } + + /// Resolve a named request ID without consuming it from pending queues. 
+ pub(super) fn resolve( + &self, + kind: RequestKind, + name: &str, + ) -> Result { + let (named_kind, id) = + self.named + .get(name) + .copied() + .ok_or_else(|| RequestRegistryError::UnknownAlias { + name: name.to_string(), + kind, + })?; + if named_kind != kind { + return Err(RequestRegistryError::AliasKindMismatch { + name: name.to_string(), + alias_kind: named_kind, + requested_kind: kind, + }); + } + Ok(id) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs b/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs new file mode 100644 index 00000000..249b9591 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs @@ -0,0 +1,80 @@ +use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; +use lsp_types::{SemanticToken, SemanticTokens}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct ExpectedSemanticToken { + pub line: u32, + pub start: u32, + pub len: u32, + pub token_type: SemanticTokenTypeName, + pub modifiers: u32, +} + +impl ExpectedSemanticToken { + #[must_use] + pub const fn new( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: u32, + ) -> Self { + Self { + line, + start, + len, + token_type, + modifiers, + } + } +} + +#[must_use] +pub fn semantic_token( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: &[SemanticTokenModifierName], +) -> ExpectedSemanticToken { + ExpectedSemanticToken::new(line, start, len, token_type, semantic_modifiers(modifiers)) +} + +#[must_use] +pub fn semantic_modifiers(modifiers: &[SemanticTokenModifierName]) -> u32 { + modifiers + .iter() + .fold(0_u32, |acc, modifier| acc | modifier.as_bitset()) +} + +#[must_use] +pub fn encode_semantic_tokens(mut tokens: Vec) -> SemanticTokens { + tokens.sort_by_key(|token| (token.line, token.start)); + + let mut encoded = Vec::with_capacity(tokens.len()); + let mut prev_line = 0_u32; + let mut prev_start = 0_u32; + + for token in tokens { + let 
delta_line = token.line.saturating_sub(prev_line); + let delta_start = if delta_line == 0 { + token.start.saturating_sub(prev_start) + } else { + token.start + }; + encoded.push(SemanticToken { + delta_line, + delta_start, + length: token.len, + token_type: token.token_type.as_index(), + token_modifiers_bitset: token.modifiers, + }); + prev_line = token.line; + prev_start = token.start; + } + + SemanticTokens { + result_id: None, + data: encoded, + } +} diff --git a/crates/jrsonnet-lsp-scope/Cargo.toml b/crates/jrsonnet-lsp-scope/Cargo.toml new file mode 100644 index 00000000..2d226745 --- /dev/null +++ b/crates/jrsonnet-lsp-scope/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "jrsonnet-lsp-scope" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Scope resolution for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +rowan.workspace = true +rustc-hash.workspace = true + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-scope/src/bindings.rs b/crates/jrsonnet-lsp-scope/src/bindings.rs new file mode 100644 index 00000000..6da9e1ef --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/bindings.rs @@ -0,0 +1,169 @@ +//! Scope binding utility functions. +//! +//! This module provides utility functions for identifying definition sites +//! and variable references in the Jsonnet AST. +//! +//! Import-related utilities are provided by `jrsonnet_lsp_import`. +//! General AST utilities (`token_at_offset`, `to_lsp_range`, etc.) are provided by +//! `jrsonnet_lsp_document`. + +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; + +/// Check if a token is at a definition site (binding name, parameter, etc.) +/// +/// A definition site is where a name is bound (declared), as opposed to where it's used. 
+/// This includes: +/// - Local variable bindings: `local x = ...` +/// - Function names: `local f(x) = ...` +/// - Function parameters: `function(x)` +#[must_use] +pub fn is_definition_site(token: &SyntaxToken) -> bool { + let Some(parent) = token.parent() else { + return false; + }; + + // Must be a Name node + if parent.kind() != SyntaxKind::NAME { + return false; + } + + // Check grandparent to see if this is a definition + let Some(grandparent) = parent.parent() else { + return false; + }; + + // These are definition contexts + matches!( + grandparent.kind(), + SyntaxKind::DESTRUCT_FULL | SyntaxKind::BIND_FUNCTION + ) +} + +/// Check if an identifier token is a variable reference (not a definition). +/// +/// A variable reference is a use of a previously-defined name. +/// This checks if the token is part of an `ExprVar` node. +#[must_use] +pub fn is_variable_reference(token: &SyntaxToken) -> bool { + let Some(parent) = token.parent() else { + return false; + }; + + if parent.kind() != SyntaxKind::NAME { + return false; + } + + let Some(grandparent) = parent.parent() else { + return false; + }; + + grandparent.kind() == SyntaxKind::EXPR_VAR +} + +/// Check if a token can be renamed (is either a definition or reference to a local binding). +#[must_use] +pub fn is_renameable(token: &SyntaxToken) -> bool { + is_definition_site(token) || is_variable_reference(token) +} + +/// Check if a token is defined at file scope (top-level). +/// +/// A file-scope definition is one that could potentially be exported +/// from a file via an import. 
+#[must_use] +pub fn is_at_file_scope(token: &SyntaxToken) -> bool { + let mut node = token.parent(); + + // Walk up the tree looking for the depth + let mut depth = 0; + while let Some(n) = node { + match n.kind() { + SyntaxKind::STMT_LOCAL => depth += 1, + SyntaxKind::EXPR => { + // Check if this is the root expression + if n.parent() + .is_some_and(|p| p.kind() == SyntaxKind::SOURCE_FILE) + { + // File-level locals are at depth 1 + return depth <= 1; + } + } + _ => {} + } + node = n.parent(); + } + + false +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_rowan_parser::AstNode; + + use super::*; + + #[test] + fn test_is_definition_site() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the 'x' tokens + let mut found_definition = false; + let mut found_reference = false; + for token in ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() == SyntaxKind::IDENT && token.text() == "x" { + if is_definition_site(&token) { + found_definition = true; + } else if is_variable_reference(&token) { + found_reference = true; + } + } + } + assert!(found_definition, "Should find definition site"); + assert!(found_reference, "Should find reference site"); + } + + #[test] + fn test_is_variable_reference() { + let code = "local x = 1; x + x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Count references + let ref_count = ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .filter(|t| { + t.kind() == SyntaxKind::IDENT && t.text() == "x" && is_variable_reference(t) + }) + .count(); + + assert_eq!(ref_count, 2, "Should find 2 variable references"); + } + + #[test] + fn test_is_at_file_scope() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = 
doc.ast(); + + let def_token = ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|t| t.kind() == SyntaxKind::IDENT && t.text() == "x" && is_definition_site(t)) + .expect("should find definition token for x"); + + assert!( + is_at_file_scope(&def_token), + "Top-level local should be at file scope" + ); + } +} diff --git a/crates/jrsonnet-lsp-scope/src/lib.rs b/crates/jrsonnet-lsp-scope/src/lib.rs new file mode 100644 index 00000000..bfd5317e --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/lib.rs @@ -0,0 +1,22 @@ +//! Scope resolution for Jsonnet LSP. +//! +//! This crate provides scope resolution and binding tracking for Jsonnet code. +//! It includes utilities for: +//! - Finding definitions of variables +//! - Finding all references to a binding +//! - Identifying definition sites vs. variable references +//! - Efficient scope indexing with O(log n) lookups + +pub mod bindings; +pub mod resolver; +pub mod stdlib; + +pub use bindings::{is_at_file_scope, is_definition_site, is_renameable, is_variable_reference}; +pub use resolver::{ + check_bind_for_name, check_param_for_name, check_scope_for_definition, find_all_references, + find_all_references_for_rename, find_definition_range, references_definition, ScopeIndex, + ScopeResolver, +}; +pub use stdlib::{ + expr_resolves_to_builtin_std, ident_resolves_to_builtin_std, var_resolves_to_builtin_std, +}; diff --git a/crates/jrsonnet-lsp-scope/src/resolver/definitions.rs b/crates/jrsonnet-lsp-scope/src/resolver/definitions.rs new file mode 100644 index 00000000..102c79ac --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/resolver/definitions.rs @@ -0,0 +1,260 @@ +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindFunction, Destruct, ExprFunction, ForSpec, MemberBindStmt, Param, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use rowan::TextRange; + +/// Find the definition range of a symbol by walking up the scope chain. 
///
/// Starting from a token that references a variable, this walks up the AST
/// looking for the binding that defines the variable.
#[must_use]
pub fn find_definition_range(token: &SyntaxToken, name: &str) -> Option<TextRange> {
    let mut current = token.parent()?;

    // Walk ancestor nodes; the first enclosing scope that binds `name` wins.
    while let Some(parent) = current.parent() {
        if let Some(range) = check_scope_for_definition(&parent, &current, name) {
            return Some(range);
        }
        current = parent;
    }

    None
}

/// Check if a scope contains a definition for the given name.
///
/// `child` is the node we came from (used for visibility checking).
#[must_use]
pub fn check_scope_for_definition(
    scope: &SyntaxNode,
    child: &SyntaxNode,
    name: &str,
) -> Option<TextRange> {
    match scope.kind() {
        SyntaxKind::EXPR => check_expr_for_definition(scope, child, name),
        SyntaxKind::EXPR_FUNCTION => check_function_for_definition(scope, name),
        SyntaxKind::BIND_FUNCTION => check_bind_function_for_definition(scope, name),
        SyntaxKind::FOR_SPEC => check_for_spec_for_definition(scope, name),
        SyntaxKind::OBJ_BODY_MEMBER_LIST => check_object_for_definition(scope, name),
        // Array/object comprehensions: the FOR_SPEC bindings are visible to the expression
        SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => {
            check_comprehension_for_definition(scope, name)
        }
        _ => None,
    }
}

/// Check an Expr for local definitions.
///
/// Local definitions are only visible after their declaration point,
/// so we only check bindings that appear before the reference.
fn check_expr_for_definition(
    expr: &SyntaxNode,
    child: &SyntaxNode,
    name: &str,
) -> Option<TextRange> {
    let mut last_match = None;

    for stmt_node in expr.children() {
        if stmt_node.kind() != SyntaxKind::STMT_LOCAL {
            continue;
        }

        // Only consider bindings that appear before our reference.
        // NOTE(review): this also skips a reference *inside* its own `local`
        // statement, so a self-recursive local (`local f = ... f ...`) would
        // not resolve here — confirm whether that is intended; ScopeIndex
        // mirrors the same rule via `visible_after`.
        if stmt_node.text_range().end() > child.text_range().start() {
            continue;
        }

        if let Some(stmt_local) = StmtLocal::cast(stmt_node) {
            for bind in stmt_local.binds() {
                if let Some(range) = check_bind_for_name(&bind, name) {
                    // Keep track of the last (nearest) match for shadowing.
                    last_match = Some(range);
                }
            }
        }
    }

    last_match
}

/// Check a Bind for a name.
#[must_use]
pub fn check_bind_for_name(bind: &Bind, name: &str) -> Option<TextRange> {
    match bind {
        Bind::BindDestruct(bd) => {
            // Convert the destructuring bind into its Destruct form; only a
            // plain (non-pattern) destructure introduces a single nameable binding.
            let destruct = bd.into()?;
            if let Destruct::DestructFull(full) = destruct {
                let bind_name = full.name()?;
                let ident = bind_name.ident_lit()?;
                if ident.text() == name {
                    return Some(bind_name.syntax().text_range());
                }
            }
            None
        }
        Bind::BindFunction(bf) => {
            let bind_name = bf.name()?;
            let ident = bind_name.ident_lit()?;
            if ident.text() == name {
                return Some(bind_name.syntax().text_range());
            }
            None
        }
    }
}

/// Check function parameters for a definition.
fn check_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option<TextRange> {
    let func = ExprFunction::cast(func_node.clone())?;
    let params = func.params_desc()?;

    for param in params.params() {
        if let Some(range) = check_param_for_name(&param, name) {
            return Some(range);
        }
    }
    None
}

/// Check `BindFunction` parameters for a definition.
fn check_bind_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option<TextRange> {
    let func = BindFunction::cast(func_node.clone())?;
    let params = func.params()?;

    for param in params.params() {
        if let Some(range) = check_param_for_name(&param, name) {
            return Some(range);
        }
    }
    None
}

/// Check a parameter for a name.
+#[must_use] +pub fn check_param_for_name(param: &Param, name: &str) -> Option { + let destruct = param.destruct()?; + if let Destruct::DestructFull(full) = destruct { + let param_name = full.name()?; + let ident = param_name.ident_lit()?; + if ident.text() == name { + return Some(param_name.syntax().text_range()); + } + } + None +} + +/// Check `ForSpec` for a definition. +fn check_for_spec_for_definition(for_node: &SyntaxNode, name: &str) -> Option { + let for_spec = ForSpec::cast(for_node.clone())?; + let destruct = for_spec.bind()?; + + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() == name { + return Some(bind_name.syntax().text_range()); + } + } + None +} + +/// Check object locals for a definition. +fn check_object_for_definition(obj_body: &SyntaxNode, name: &str) -> Option { + for member_node in obj_body.children() { + if member_node.kind() != SyntaxKind::MEMBER_BIND_STMT { + continue; + } + if let Some(member_bind) = MemberBindStmt::cast(member_node) { + if let Some(obj_local) = member_bind.obj_local() { + if let Some(bind) = obj_local.bind() { + if let Some(range) = check_bind_for_name(&bind, name) { + return Some(range); + } + } + } + } + } + None +} + +/// Check comprehension (array or object) for `FOR_SPEC` definitions. +/// +/// In `[x for x in arr]`, the `FOR_SPEC` binding is visible to the expression. 
fn check_comprehension_for_definition(comp_node: &SyntaxNode, name: &str) -> Option<TextRange> {
    // A comprehension may carry several FOR_SPEC children; the first one
    // binding `name` wins.
    for child in comp_node.children() {
        if child.kind() != SyntaxKind::FOR_SPEC {
            continue;
        }
        if let Some(range) = check_for_spec_for_definition(&child, name) {
            return Some(range);
        }
    }
    None
}

#[cfg(test)]
mod tests {
    use jrsonnet_lsp_document::{token_at_offset, ByteOffset, DocVersion, Document};
    use jrsonnet_rowan_parser::AstNode;

    use super::find_definition_range;

    #[test]
    fn test_find_definition_range_local_variable() {
        let code = "local x = 1; x + 1";
        //                ^def   ^ref
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        // Find the reference 'x' at position 13
        let token = token_at_offset(ast.syntax(), ByteOffset::from(13u32))
            .expect("should find token at position 13");
        assert_eq!(token.text(), "x");

        let range = find_definition_range(&token, "x").expect("should find definition range");

        // Definition is at position 6
        assert_eq!(range.start(), 6.into());
    }

    #[test]
    fn test_find_definition_range_function_param() {
        let code = "local f(x) = x * 2; f(3)";
        //                  ^param ^ref
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        // Find the reference 'x' at position 13
        let token = token_at_offset(ast.syntax(), ByteOffset::from(13u32))
            .expect("should find token at position 13");
        assert_eq!(token.text(), "x");

        let range =
            find_definition_range(&token, "x").expect("should find definition range for parameter");

        // Parameter is at position 8
        assert_eq!(range.start(), 8.into());
    }

    #[test]
    fn test_shadowing() {
        let code = "local x = 1; local x = 2; x";
        //                ^def1        ^def2  ^ref
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        // Find the final 'x' reference
        let token = token_at_offset(ast.syntax(), ByteOffset::from(26u32))
            .expect("should find token at position 26");
        assert_eq!(token.text(), "x");

        let range = find_definition_range(&token, "x").expect("should find definition range");

        // Should resolve to the second (closer) definition at position 19
        assert_eq!(range.start(), 19.into());
    }
}

// ---- file: crates/jrsonnet-lsp-scope/src/resolver/mod.rs (new) ----

//! Scope resolution for Jsonnet AST.
//!
//! This module provides shared functionality for resolving symbol definitions
//! and finding references within Jsonnet code.
//!
//! - `definitions`: linear scope walking definition lookup.
//! - `references`: reference collection and cached `ScopeResolver`.
//! - `scope_index`: indexed scope tree for O(log n) lookups.

mod definitions;
mod references;
mod scope_index;

pub use definitions::{
    check_bind_for_name, check_param_for_name, check_scope_for_definition, find_definition_range,
};
pub use references::{
    find_all_references, find_all_references_for_rename, references_definition, ScopeResolver,
};
pub use scope_index::ScopeIndex;

// ---- file: crates/jrsonnet-lsp-scope/src/resolver/references.rs (new) ----

use jrsonnet_rowan_parser::{SyntaxKind, SyntaxNode, SyntaxToken};
use rowan::{TextRange, TextSize};
use rustc_hash::FxHashMap;

use super::definitions::{check_scope_for_definition, find_definition_range};
use crate::bindings::{is_definition_site, is_variable_reference};

/// Check if a reference resolves to a specific definition.
///
/// Walks up the scope chain from the token to find its definition,
/// then checks if it matches the expected definition range.
+#[must_use] +pub fn references_definition(token: &SyntaxToken, name: &str, def_range: TextRange) -> bool { + let Some(mut current) = token.parent() else { + return false; + }; + + while let Some(parent) = current.parent() { + if let Some(found_range) = check_scope_for_definition(&parent, ¤t, name) { + return found_range == def_range; + } + current = parent; + } + + false +} + +/// Find all references to a name in the AST. +/// +/// This function walks the entire AST looking for identifiers that: +/// 1. Match the given name +/// 2. Are either the definition or references that resolve to the definition +/// +/// The `definition_range` should be the range of the Name node at the definition site. +pub fn find_all_references( + root: &SyntaxNode, + name: &str, + definition_range: TextRange, +) -> Vec { + let mut references = Vec::new(); + + // Walk all tokens looking for identifiers matching the name. + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() == SyntaxKind::IDENT && token.text() == name { + // Check if this is a reference (ExprVar). + if is_variable_reference(&token) { + // It's a reference - check if it resolves to our definition. + if references_definition(&token, name, definition_range) { + references.push(token.text_range()); + } + } else if is_definition_site(&token) { + // It's a definition - check if it matches our target definition. + if let Some(parent) = token.parent() { + if parent.text_range() == definition_range { + references.push(parent.text_range()); + } + } + } + } + } + + references +} + +/// Find all references including both definition and uses, returning identifier ranges. +/// +/// This is a variant of `find_all_references` that returns the identifier token ranges +/// instead of the Name node ranges. This is useful for rename operations where +/// we want to replace just the identifier text. 
pub fn find_all_references_for_rename(
    root: &SyntaxNode,
    name: &str,
    definition_range: TextRange,
) -> Vec<TextRange> {
    let mut found = Vec::new();

    // Restrict the walk to identifier tokens spelling `name` up front.
    let matching_idents = root
        .descendants_with_tokens()
        .filter_map(rowan::NodeOrToken::into_token)
        .filter(|t| t.kind() == SyntaxKind::IDENT && t.text() == name);

    for token in matching_idents {
        if is_variable_reference(&token) {
            // Use site resolving to our definition: rename the identifier itself.
            if references_definition(&token, name, definition_range) {
                found.push(token.text_range());
            }
        } else if is_definition_site(&token) {
            // The definition site: rename its identifier token, not the Name node.
            if let Some(parent) = token.parent() {
                if parent.text_range() == definition_range {
                    found.push(token.text_range());
                }
            }
        }
    }

    found
}

/// Cached scope resolver for efficient repeated lookups.
///
/// Precomputes a mapping from each variable reference to its definition.
pub struct ScopeResolver {
    /// Maps reference token start position to definition's `TextRange`.
    reference_to_def: FxHashMap<TextSize, TextRange>,
}

impl ScopeResolver {
    /// Build a scope resolver for the given AST root.
    ///
    /// Walks the AST once to build the scope map.
    pub fn new(root: &SyntaxNode) -> Self {
        let mut reference_to_def = FxHashMap::default();

        // Only identifier tokens that are variable references get an entry.
        let reference_idents = root
            .descendants_with_tokens()
            .filter_map(rowan::NodeOrToken::into_token)
            .filter(|t| t.kind() == SyntaxKind::IDENT && is_variable_reference(t));

        for token in reference_idents {
            if let Some(def_range) = find_definition_range(&token, token.text()) {
                reference_to_def.insert(token.text_range().start(), def_range);
            }
        }

        Self { reference_to_def }
    }

    /// Get the definition range for a reference token.
    ///
    /// Returns the `TextRange` of the Name node at the definition site,
    /// or None if the token is not a reference or has no definition.
    #[must_use]
    pub fn get_definition(&self, token: &SyntaxToken) -> Option<TextRange> {
        self.reference_to_def
            .get(&token.text_range().start())
            .copied()
    }

    /// Check if a reference resolves to a specific definition.
    #[must_use]
    pub fn references_definition(&self, token: &SyntaxToken, def_range: TextRange) -> bool {
        self.get_definition(token) == Some(def_range)
    }

    /// Find all references to a definition, returning identifier token ranges.
    pub fn find_references(
        &self,
        root: &SyntaxNode,
        name: &str,
        definition_range: TextRange,
    ) -> Vec<TextRange> {
        let mut references = Vec::new();

        for token in root
            .descendants_with_tokens()
            .filter_map(rowan::NodeOrToken::into_token)
        {
            if token.kind() != SyntaxKind::IDENT || token.text() != name {
                continue;
            }

            if is_variable_reference(&token) {
                // Use site: resolved through the precomputed map.
                if self.references_definition(&token, definition_range) {
                    references.push(token.text_range());
                }
                continue;
            }

            if !is_definition_site(&token) {
                continue;
            }

            let Some(parent) = token.parent() else {
                continue;
            };

            // Definition site: include the identifier of our own definition.
            if parent.text_range() == definition_range {
                references.push(token.text_range());
            }
        }

        references
    }
}

#[cfg(test)]
mod tests {
    use jrsonnet_lsp_document::{DocVersion, Document};
    use jrsonnet_rowan_parser::AstNode;
    use rowan::TextRange;

    use super::find_all_references;

    #[test]
    fn test_find_all_references() {
        let code = "local x = 1; x + x";
        //                ^def   ^ref ^ref
        //          0123456789...
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        // Get the definition range (Name node at position 6)
        let def_range = TextRange::new(6.into(), 7.into());

        let refs = find_all_references(ast.syntax(), "x", def_range);
        // def at 6, refs at 13 and 17
        assert_eq!(
            refs,
            vec![
                TextRange::new(6.into(), 7.into()),   // definition
                TextRange::new(13.into(), 14.into()), // first use
                TextRange::new(17.into(), 18.into()), // second use
            ]
        );
    }

    #[test]
    fn test_references_respects_scope() {
        let code = "local x = 1; local f(x) = x; x";
        //                ^def1        ^def2 ^ref2 ^ref1
        //          0123456789...
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        // Get the definition range for outer x (Name node at position 6)
        let def_range = TextRange::new(6.into(), 7.into());

        let refs = find_all_references(ast.syntax(), "x", def_range);
        // Should find: the definition (6) and the last reference (29), not the inner x.
        assert_eq!(
            refs,
            vec![
                TextRange::new(6.into(), 7.into()),   // outer x definition
                TextRange::new(29.into(), 30.into()), // final reference to outer x
            ]
        );
    }
}

// ---- file: crates/jrsonnet-lsp-scope/src/resolver/scope_index.rs (new) ----

use std::sync::RwLock;

use jrsonnet_rowan_parser::{
    nodes::{
        Bind, BindFunction, Destruct, ExprFunction, ForSpec, MemberBindStmt, Param, StmtLocal,
    },
    AstNode, SyntaxKind, SyntaxNode,
};
use rowan::{TextRange, TextSize};
use rustc_hash::FxHashMap;

/// A binding in a scope - maps a name to its definition range.
#[derive(Debug, Clone)]
struct ScopeBinding {
    /// The name of the binding.
    name: String,
    /// The `TextRange` of the definition (Name node).
    range: TextRange,
    /// The position after which this binding is visible (for local bindings).
    /// None means visible throughout the scope (e.g., function params).
    visible_after: Option<TextSize>,
}

/// A scope with its bindings.
#[derive(Debug, Clone)]
struct IndexedScope {
    /// The range of this scope.
    range: TextRange,
    /// The index of the parent scope in the scopes vector, or None for root.
    parent: Option<usize>,
    /// Bindings in this scope.
    bindings: Vec<ScopeBinding>,
}

/// Indexed scope structure for efficient O(log n) lookups.
///
/// Builds a scope tree once and uses binary search to find scopes containing
/// a given position. This is more efficient than walking the AST for each lookup.
///
/// Scope chains are memoized for repeated lookups at the same scope.
pub struct ScopeIndex {
    /// Scopes sorted by start position.
    scopes: Vec<IndexedScope>,
    /// Map from scope start position to index for quick lookup.
    scope_starts: Vec<(TextSize, usize)>,
    /// Cached scope chains: `scope_index` -> chain of scope ranges (innermost first).
    scope_chain_cache: RwLock<FxHashMap<usize, Vec<TextRange>>>,
    /// Cached bindings per scope chain: `scope_index` -> all bindings in chain (with visibility info).
    bindings_cache: RwLock<FxHashMap<usize, Vec<CachedBinding>>>,
}

/// A cached binding with visibility information for filtering at query time.
#[derive(Debug, Clone)]
struct CachedBinding {
    name: String,
    range: TextRange,
    /// Position after which this binding is visible, or None if always visible.
    visible_after: Option<TextSize>,
}

impl ScopeIndex {
    /// Build a scope index from an AST root.
    ///
    /// Walks the AST once to collect all scopes and their bindings.
    #[must_use]
    pub fn new(root: &SyntaxNode) -> Self {
        let mut scopes = Vec::new();
        let mut scope_stack: Vec<usize> = Vec::new();

        Self::collect_scopes(root, &mut scopes, &mut scope_stack);

        // Build sorted index for binary search.
        // `sort_by_key` is stable, so among equal start positions the
        // pre-order push order (parents before children) is preserved.
        let mut scope_starts: Vec<(TextSize, usize)> = scopes
            .iter()
            .enumerate()
            .map(|(i, s)| (s.range.start(), i))
            .collect();
        scope_starts.sort_by_key(|(pos, _)| *pos);

        Self {
            scopes,
            scope_starts,
            scope_chain_cache: RwLock::new(FxHashMap::default()),
            bindings_cache: RwLock::new(FxHashMap::default()),
        }
    }

    /// Collect scopes recursively from the AST.
    ///
    /// `scope_stack` tracks the indices of the currently-open scopes so each
    /// new scope records its innermost enclosing scope as `parent`.
    fn collect_scopes(
        node: &SyntaxNode,
        scopes: &mut Vec<IndexedScope>,
        scope_stack: &mut Vec<usize>,
    ) {
        // Node kinds that introduce bindings / a new lexical scope.
        let is_scope = matches!(
            node.kind(),
            SyntaxKind::EXPR_FUNCTION
                | SyntaxKind::BIND_FUNCTION
                | SyntaxKind::FOR_SPEC
                | SyntaxKind::OBJ_BODY_MEMBER_LIST
                | SyntaxKind::EXPR_ARRAY_COMP
                | SyntaxKind::OBJ_BODY_COMP
                | SyntaxKind::EXPR
        );

        let scope_idx = if is_scope {
            let parent = scope_stack.last().copied();
            let bindings = Self::extract_bindings(node);
            let idx = scopes.len();
            scopes.push(IndexedScope {
                range: node.text_range(),
                parent,
                bindings,
            });
            scope_stack.push(idx);
            Some(idx)
        } else {
            None
        };

        // Recurse into children
        for child in node.children() {
            Self::collect_scopes(&child, scopes, scope_stack);
        }

        if scope_idx.is_some() {
            scope_stack.pop();
        }
    }

    /// Extract bindings from a scope node.
    fn extract_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
        match node.kind() {
            SyntaxKind::EXPR => Self::extract_expr_bindings(node),
            SyntaxKind::EXPR_FUNCTION => Self::extract_expr_function_bindings(node),
            SyntaxKind::BIND_FUNCTION => Self::extract_bind_function_bindings(node),
            SyntaxKind::FOR_SPEC => ForSpec::cast(node.clone())
                .and_then(|for_spec| Self::for_spec_binding(&for_spec))
                .into_iter()
                .collect(),
            SyntaxKind::OBJ_BODY_MEMBER_LIST => Self::extract_object_local_bindings(node),
            SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => {
                Self::extract_comprehension_bindings(node)
            }
            _ => Vec::new(),
        }
    }

    /// Construct a `ScopeBinding` (plain field-ordering helper).
    fn make_binding(
        name: String,
        range: TextRange,
        visible_after: Option<TextSize>,
    ) -> ScopeBinding {
        ScopeBinding {
            name,
            range,
            visible_after,
        }
    }

    /// Bindings from `local` statements directly under an EXPR; each is only
    /// visible after the end of its own statement.
    fn extract_expr_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
        node.children()
            .filter(|stmt_node| stmt_node.kind() == SyntaxKind::STMT_LOCAL)
            .filter_map(StmtLocal::cast)
            .flat_map(|stmt_local| {
                let visible_after = Some(stmt_local.syntax().text_range().end());
                stmt_local.binds().filter_map(move |bind| {
                    Self::binding_name_and_range(&bind)
                        .map(|(name, range)| Self::make_binding(name, range, visible_after))
                })
            })
            .collect()
    }

    /// Parameter bindings of an `ExprFunction` (visible throughout the scope).
    fn extract_expr_function_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
        let Some(func) = ExprFunction::cast(node.clone()) else {
            return Vec::new();
        };
        let Some(params) = func.params_desc() else {
            return Vec::new();
        };
        params
            .params()
            .filter_map(|param| {
                Self::param_name_and_range(&param)
                    .map(|(name, range)| Self::make_binding(name, range, None))
            })
            .collect()
    }

    /// Parameter bindings of a `BindFunction` (visible throughout the scope).
    fn extract_bind_function_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
        let Some(func) = BindFunction::cast(node.clone()) else {
            return Vec::new();
        };
        let Some(params) = func.params() else {
            return Vec::new();
        };
        params
            .params()
            .filter_map(|param| {
                Self::param_name_and_range(&param)
                    .map(|(name, range)| Self::make_binding(name, range, None))
            })
            .collect()
    }

    /// The single binding introduced by a `for x in …` spec, if nameable.
    fn for_spec_binding(for_spec: &ForSpec) -> Option<ScopeBinding> {
        let destruct = for_spec.bind()?;
        let Destruct::DestructFull(full) = destruct else {
            return None;
        };
        let bind_name = full.name()?;
        let ident = bind_name.ident_lit()?;
        Some(Self::make_binding(
            ident.text().to_string(),
            bind_name.syntax().text_range(),
            None,
        ))
    }

    /// Object-local bindings (`local x = …` inside an object body); visible to
    /// every member, so no `visible_after`.
    fn extract_object_local_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
        node.children()
            .filter(|member_node| member_node.kind() == SyntaxKind::MEMBER_BIND_STMT)
            .filter_map(MemberBindStmt::cast)
            .filter_map(|member_bind| member_bind.obj_local())
            .filter_map(|obj_local| obj_local.bind())
            .filter_map(|bind| {
                Self::binding_name_and_range(&bind)
                    .map(|(name, range)| Self::make_binding(name, range, None))
            })
            .collect()
    }

    /// FOR_SPEC bindings belonging to an array/object comprehension node.
    fn extract_comprehension_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
        node.children()
            .filter(|child| child.kind() == SyntaxKind::FOR_SPEC)
            .filter_map(ForSpec::cast)
            .filter_map(|for_spec| Self::for_spec_binding(&for_spec))
            .collect()
    }

    /// Extract name and range from a Bind.
    fn binding_name_and_range(bind: &Bind) -> Option<(String, TextRange)> {
        match bind {
            Bind::BindDestruct(bd) => {
                let destruct = bd.into()?;
                if let Destruct::DestructFull(full) = destruct {
                    let bind_name = full.name()?;
                    let ident = bind_name.ident_lit()?;
                    return Some((ident.text().to_string(), bind_name.syntax().text_range()));
                }
                None
            }
            Bind::BindFunction(bf) => {
                let bind_name = bf.name()?;
                let ident = bind_name.ident_lit()?;
                Some((ident.text().to_string(), bind_name.syntax().text_range()))
            }
        }
    }

    /// Extract name and range from a Param.
+ fn param_name_and_range(param: &Param) -> Option<(String, TextRange)> { + let destruct = param.destruct()?; + if let Destruct::DestructFull(full) = destruct { + let param_name = full.name()?; + let ident = param_name.ident_lit()?; + return Some((ident.text().to_string(), param_name.syntax().text_range())); + } + None + } + + /// Find the innermost scope containing a position. + fn find_innermost_scope(&self, pos: TextSize) -> Option { + // Binary search to find candidate scopes. + let search_idx = self + .scope_starts + .partition_point(|(start, _)| *start <= pos); + + // Check scopes from the found position backwards. + let mut best: Option = None; + let mut best_size = u32::MAX; + + for i in (0..search_idx).rev() { + let Some((_, scope_idx)) = self.scope_starts.get(i).copied() else { + continue; + }; + let Some(scope) = self.scopes.get(scope_idx) else { + continue; + }; + + if !scope.range.contains(pos) { + continue; + } + + let size = scope.range.len().into(); + if size < best_size { + best = Some(scope_idx); + best_size = size; + } + } + + best + } + + /// Find the definition for a name at a given position. + /// + /// Returns the `TextRange` of the definition's Name node. + pub fn find_definition(&self, pos: TextSize, name: &str) -> Option { + let mut scope_idx = self.find_innermost_scope(pos)?; + + loop { + let scope = self.scopes.get(scope_idx)?; + + // Search bindings in reverse order for shadowing (last match wins). + for binding in scope.bindings.iter().rev() { + if binding.name != name { + continue; + } + + // Check visibility. + if let Some(visible_after) = binding.visible_after { + if pos < visible_after { + continue; + } + } + + return Some(binding.range); + } + + // Move to parent scope. + scope_idx = scope.parent?; + } + } + + /// Check if a position references a specific definition. 
    pub fn references_definition(&self, pos: TextSize, name: &str, def_range: TextRange) -> bool {
        self.find_definition(pos, name) == Some(def_range)
    }

    /// Get the scope chain for a position (innermost to outermost).
    ///
    /// Returns a vector of scope ranges from the innermost scope containing
    /// the position to the root scope. Results are memoized per scope index.
    pub fn scope_chain(&self, pos: TextSize) -> Vec<TextRange> {
        let Some(scope_idx) = self.find_innermost_scope(pos) else {
            return Vec::new();
        };

        // Check cache first. (A poisoned lock is recovered: the cached data
        // is plain values, so it stays valid even if a writer panicked.)
        {
            let cache = self
                .scope_chain_cache
                .read()
                .unwrap_or_else(std::sync::PoisonError::into_inner);
            if let Some(cached) = cache.get(&scope_idx) {
                return cached.clone();
            }
        }

        // Compute the scope chain.
        let chain = self.compute_scope_chain(scope_idx);

        // Cache and return.
        self.scope_chain_cache
            .write()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .insert(scope_idx, chain.clone());
        chain
    }

    /// Compute the scope chain for a given scope index (uncached).
    fn compute_scope_chain(&self, start_scope_idx: usize) -> Vec<TextRange> {
        let mut chain = Vec::new();
        let mut scope_idx = start_scope_idx;

        loop {
            let Some(scope) = self.scopes.get(scope_idx) else {
                break;
            };
            chain.push(scope.range);
            match scope.parent {
                Some(parent_idx) => scope_idx = parent_idx,
                None => break,
            }
        }

        chain
    }

    /// Get all bindings visible at a position.
    ///
    /// Returns bindings from innermost to outermost scope,
    /// including shadowed names. The cached bindings include visibility info,
    /// which is filtered at query time.
    pub fn bindings_at(&self, pos: TextSize) -> Vec<(String, TextRange)> {
        let Some(scope_idx) = self.find_innermost_scope(pos) else {
            return Vec::new();
        };

        // Get or compute cached bindings for this scope chain.
        let cached = self.get_or_compute_bindings(scope_idx);

        // Filter by visibility at the query position.
        cached
            .into_iter()
            .filter(|b| {
                b.visible_after
                    .is_none_or(|visible_after| pos >= visible_after)
            })
            .map(|b| (b.name, b.range))
            .collect()
    }

    /// Get or compute cached bindings for a scope chain.
    fn get_or_compute_bindings(&self, scope_idx: usize) -> Vec<CachedBinding> {
        // Check cache first.
        {
            let cache = self
                .bindings_cache
                .read()
                .unwrap_or_else(std::sync::PoisonError::into_inner);
            if let Some(cached) = cache.get(&scope_idx) {
                return cached.clone();
            }
        }

        // Compute bindings for the entire scope chain.
        let bindings = self.compute_bindings(scope_idx);

        // Cache and return.
        self.bindings_cache
            .write()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .insert(scope_idx, bindings.clone());
        bindings
    }

    /// Compute all bindings in a scope chain (uncached).
    fn compute_bindings(&self, start_scope_idx: usize) -> Vec<CachedBinding> {
        let mut bindings = Vec::new();
        let mut scope_idx = start_scope_idx;

        loop {
            let Some(scope) = self.scopes.get(scope_idx) else {
                break;
            };

            for binding in &scope.bindings {
                bindings.push(CachedBinding {
                    name: binding.name.clone(),
                    range: binding.range,
                    visible_after: binding.visible_after,
                });
            }

            match scope.parent {
                Some(parent_idx) => scope_idx = parent_idx,
                None => break,
            }
        }

        bindings
    }
}

#[cfg(test)]
mod tests {
    use jrsonnet_lsp_document::{DocVersion, Document};
    use jrsonnet_rowan_parser::{AstNode, SyntaxKind};
    use rowan::TextRange;

    use super::ScopeIndex;
    use crate::{bindings::is_variable_reference, resolver::find_definition_range};

    #[test]
    fn test_scope_index_local_variable() {
        let code = "local x = 1; x + 1";
        //                ^def   ^ref
        //          0123456789012345678
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // Reference at position 13 should find definition at position 6-7
        let def_range = index.find_definition(13.into(), "x");
        assert_eq!(def_range, Some(TextRange::new(6.into(), 7.into())));
    }

    #[test]
    fn test_scope_index_function_param() {
        let code = "local f(x) = x * 2; f(3)";
        //                  ^param ^ref
        //          0123456789012345678901234
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // Reference at position 13 should find parameter at position 8
        let def_range = index.find_definition(13.into(), "x");
        assert_eq!(def_range, Some(TextRange::new(8.into(), 9.into())));
    }

    #[test]
    fn test_scope_index_shadowing() {
        let code = "local x = 1; local x = 2; x";
        //                ^def1        ^def2  ^ref
        //          0123456789012345678901234567890
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // Reference at position 26 should find the second (shadowing) definition
        let def_range = index.find_definition(26.into(), "x");
        assert_eq!(def_range, Some(TextRange::new(19.into(), 20.into())));
    }

    #[test]
    fn test_scope_index_nested_scopes() {
        let code = "local x = 1; local f(x) = x; x";
        //                ^def1        ^def2 ^ref2 ^ref1
        //          0123456789012345678901234567890
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // Reference at position 26 (inside function) should find param at position 21
        let def_range = index.find_definition(26.into(), "x");
        assert_eq!(def_range, Some(TextRange::new(21.into(), 22.into())));

        // Reference at position 29 (outside function) should find outer x at position 6
        let def_range = index.find_definition(29.into(), "x");
        assert_eq!(def_range, Some(TextRange::new(6.into(), 7.into())));
    }

    #[test]
    fn test_scope_index_matches_linear_search() {
        // Verify that ScopeIndex produces the same results as the linear search.
        let code = "local a = 1; local f(x, y) = x + y; local b = f(a, 2); b";
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // Test various positions.
        for token in ast
            .syntax()
            .descendants_with_tokens()
            .filter_map(rowan::NodeOrToken::into_token)
        {
            if token.kind() != SyntaxKind::IDENT {
                continue;
            }
            if !is_variable_reference(&token) {
                continue;
            }

            let name = token.text();
            let pos = token.text_range().start();

            let linear_result = find_definition_range(&token, name);
            let index_result = index.find_definition(pos, name);

            assert_eq!(
                linear_result, index_result,
                "Mismatch for '{name}' at position {pos:?}"
            );
        }
    }

    #[test]
    fn test_scope_chain() {
        let code = "local f(x) = x * 2; f(3)";
        //          0123456789012345678901234
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // Position 13 (inside function body) should have multiple scopes.
        let chain = index.scope_chain(13.into());
        // Function body is nested within multiple syntax nodes.
        assert_eq!(chain.len(), 4);
    }

    #[test]
    fn test_bindings_at() {
        let code = "local a = 1; local b = 2; a + b";
        //          0123456789012345678901234567890
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // At position 30 (after both bindings), both 'a' and 'b' should be visible.
        let bindings = index.bindings_at(30.into());
        let mut names: Vec<_> = bindings.iter().map(|(n, _)| n.as_str()).collect();
        names.sort_unstable();
        assert_eq!(names, vec!["a", "b"]);
    }

    #[test]
    fn test_scope_chain_cache_consistency() {
        // Test that multiple calls to scope_chain return consistent results.
        let code = "local f(x) = x * 2; f(3)";
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // Call multiple times at the same position - should return identical results.
        let chain1 = index.scope_chain(13.into());
        let chain2 = index.scope_chain(13.into());
        let chain3 = index.scope_chain(13.into());

        assert_eq!(chain1, chain2, "Repeated calls should return same result");
        assert_eq!(chain2, chain3, "Repeated calls should return same result");

        // Verify the cache is populated (we get results, proving the mechanism works).
        assert!(!chain1.is_empty(), "Should have at least one scope");
    }

    #[test]
    fn test_bindings_cache_with_visibility() {
        // Test that bindings cache correctly handles visibility filtering.
        let code = "local a = 1; local b = 2; local c = 3; a + b + c";
        //          0         1         2         3         4
        //          0123456789012345678901234567890123456789012345678
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // At position 15 (between 'local a' and 'local b'), only 'a' should be visible.
        let bindings_15 = index.bindings_at(15.into());
        let names_15: Vec<_> = bindings_15.iter().map(|(n, _)| n.as_str()).collect();
        assert_eq!(names_15, vec!["a"]);

        // At position 28 (between 'local b' and 'local c'), 'a' and 'b' should be visible.
        let bindings_28 = index.bindings_at(28.into());
        let mut names_28: Vec<_> = bindings_28.iter().map(|(n, _)| n.as_str()).collect();
        names_28.sort_unstable();
        assert_eq!(names_28, vec!["a", "b"]);

        // At position 45 (after all locals), all should be visible.
        let bindings_45 = index.bindings_at(45.into());
        let mut names_45: Vec<_> = bindings_45.iter().map(|(n, _)| n.as_str()).collect();
        names_45.sort_unstable();
        assert_eq!(names_45, vec!["a", "b", "c"]);

        // Repeated call should give same result (using cache).
        let bindings_45_again = index.bindings_at(45.into());
        let mut names_45_again: Vec<_> =
            bindings_45_again.iter().map(|(n, _)| n.as_str()).collect();
        names_45_again.sort_unstable();
        assert_eq!(names_45, names_45_again);
    }

    #[test]
    fn test_cache_handles_different_scopes() {
        // Test that caching works correctly across different scopes.
        let code = "local outer = 1; local f(inner) = inner + outer; outer + f(2)";
        //          0         1         2         3         4         5         6
        //          01234567890123456789012345678901234567890123456789012345678901234
        let doc = Document::new(code.to_string(), DocVersion::new(1));
        let ast = doc.ast();

        let index = ScopeIndex::new(ast.syntax());

        // Inside function (position 35, the 'inner' reference).
        let bindings_in_func = index.bindings_at(35.into());
        let mut names_in_func: Vec<_> = bindings_in_func.iter().map(|(n, _)| n.as_str()).collect();
        names_in_func.sort_unstable();
        assert!(
            names_in_func.contains(&"inner"),
            "Should see 'inner' inside function"
        );
        assert!(
            names_in_func.contains(&"outer"),
            "Should see 'outer' inside function"
        );

        // Outside function (position 58, after function definition).
        let bindings_outside = index.bindings_at(58.into());
        let names_outside: Vec<_> = bindings_outside.iter().map(|(n, _)| n.as_str()).collect();
        assert!(
            !names_outside.contains(&"inner"),
            "Should NOT see 'inner' outside function"
        );
        assert!(
            names_outside.contains(&"outer"),
            "Should see 'outer' outside function"
        );
        assert!(
            names_outside.contains(&"f"),
            "Should see 'f' outside function"
        );
    }
}

// ---- file: crates/jrsonnet-lsp-scope/src/stdlib.rs (new) ----

//! Standard-library (`std`) binding resolution helpers.
//!
//! These helpers determine whether an expression resolves to the built-in
//! Jsonnet `std` object, following lexical scope and aliases.
+ +use jrsonnet_rowan_parser::{ + nodes::{BindDestruct, Expr, ExprBase, ExprVar}, + AstNode, SyntaxKind, SyntaxToken, +}; +use rowan::TextRange; +use rustc_hash::FxHashSet; + +use crate::find_definition_range; + +/// Return true if this expression resolves to the built-in `std` object. +#[must_use] +pub fn expr_resolves_to_builtin_std(expr: &Expr) -> bool { + let mut seen_defs = FxHashSet::default(); + expr_resolves_to_builtin_std_inner(expr, &mut seen_defs) +} + +/// Return true if this variable expression resolves to the built-in `std` object. +#[must_use] +pub fn var_resolves_to_builtin_std(var: &ExprVar) -> bool { + let Some(ident) = var.name().and_then(|name| name.ident_lit()) else { + return false; + }; + + let mut seen_defs = FxHashSet::default(); + ident_resolves_to_builtin_std_inner(&ident, &mut seen_defs) +} + +/// Return true if this identifier token resolves to the built-in `std` object. +#[must_use] +pub fn ident_resolves_to_builtin_std(ident: &SyntaxToken) -> bool { + if ident.kind() != SyntaxKind::IDENT { + return false; + } + + let mut seen_defs = FxHashSet::default(); + ident_resolves_to_builtin_std_inner(ident, &mut seen_defs) +} + +fn expr_resolves_to_builtin_std_inner(expr: &Expr, seen_defs: &mut FxHashSet) -> bool { + let Some(base) = expr.expr_base() else { + return false; + }; + + match base { + ExprBase::ExprParened(parens) => { + let Some(inner) = parens.expr() else { + return false; + }; + expr_resolves_to_builtin_std_inner(&inner, seen_defs) + } + ExprBase::ExprVar(var) => { + let Some(ident) = var.name().and_then(|name| name.ident_lit()) else { + return false; + }; + ident_resolves_to_builtin_std_inner(&ident, seen_defs) + } + _ => false, + } +} + +fn ident_resolves_to_builtin_std_inner( + ident: &SyntaxToken, + seen_defs: &mut FxHashSet, +) -> bool { + let name = ident.text(); + let def_range = find_definition_range(ident, name); + + // Bare `std` with no local definition resolves to builtin std. 
+ if name == "std" && def_range.is_none() { + return true; + } + + let Some(def_range) = def_range else { + return false; + }; + if !seen_defs.insert(def_range) { + // Cycle detected in alias chain. + return false; + } + + let Some(root) = ident.parent_ancestors().last() else { + return false; + }; + let Some(def_node) = root + .descendants() + .find(|node| node.text_range() == def_range) + else { + return false; + }; + let Some(bind_node) = def_node.ancestors().find(|node| { + matches!( + node.kind(), + SyntaxKind::BIND_DESTRUCT | SyntaxKind::BIND_FUNCTION + ) + }) else { + return false; + }; + let Some(bind_destruct) = BindDestruct::cast(bind_node) else { + return false; + }; + let Some(value_expr) = bind_destruct.value() else { + return false; + }; + expr_resolves_to_builtin_std_inner(&value_expr, seen_defs) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_rowan_parser::{nodes::ExprBase, AstNode}; + use rowan::NodeOrToken; + + use super::*; + + fn make_doc(code: &str) -> Document { + Document::new(code.to_string(), DocVersion::new(1)) + } + + fn token_for_ident(code: &str, needle: &str, occurrence: usize) -> SyntaxToken { + let doc = make_doc(code); + let ast = doc.ast(); + ast.syntax() + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .filter(|token| token.kind() == SyntaxKind::IDENT && token.text() == needle) + .nth(occurrence) + .expect("identifier token") + } + + fn root_field_base_expr(code: &str) -> Expr { + let doc = make_doc(code); + let ast = doc.ast(); + let expr = ast.expr().expect("root expr"); + let ExprBase::ExprField(field) = expr.expr_base().expect("root base") else { + panic!("expected field expression"); + }; + field.base().expect("field base") + } + + #[test] + fn test_ident_resolves_builtin_std_direct() { + let ident = token_for_ident("std.length", "std", 0); + assert!(ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_ident_resolves_builtin_std_alias() { + 
let ident = token_for_ident("local s = std; s.length", "s", 1); + assert!(ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_ident_resolves_builtin_std_alias_chain() { + let ident = token_for_ident("local s = std; local t = s; t.length", "t", 1); + assert!(ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_ident_resolves_builtin_std_shadowed_std() { + let ident = token_for_ident("local std = { length(x): x }; std.length", "std", 1); + assert!(!ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_ident_resolves_builtin_std_cycle() { + let ident = token_for_ident("local a = b; local b = a; a.length", "a", 2); + assert!(!ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_expr_resolves_builtin_std_parenthesized_alias() { + let base = root_field_base_expr("local s = std; (s).length"); + assert!(expr_resolves_to_builtin_std(&base)); + } + + #[test] + fn test_var_resolves_builtin_std_shadowed() { + let base = root_field_base_expr("local std = { length(x): x }; std.length"); + let ExprBase::ExprVar(var) = base.expr_base().expect("base kind") else { + panic!("expected expr var"); + }; + assert!(!var_resolves_to_builtin_std(&var)); + } +} diff --git a/crates/jrsonnet-lsp-stdlib/Cargo.toml b/crates/jrsonnet-lsp-stdlib/Cargo.toml new file mode 100644 index 00000000..38830462 --- /dev/null +++ b/crates/jrsonnet-lsp-stdlib/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "jrsonnet-lsp-stdlib" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Standard library signatures and documentation for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-std-sig = { version = "0.5.0-pre97", path = "../jrsonnet-std-sig" } + +[dev-dependencies] +indoc = "2" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-stdlib/src/docs.rs 
b/crates/jrsonnet-lsp-stdlib/src/docs.rs
new file mode 100644
index 00000000..35b8c80c
--- /dev/null
+++ b/crates/jrsonnet-lsp-stdlib/src/docs.rs
@@ -0,0 +1,189 @@
+//! Standard library function documentation.
+//!
+//! Documentation for Jsonnet standard library functions.
+//! Documentation strings are sourced from the `jrsonnet-std-sig` crate spec.
+
+use std::{collections::HashMap, sync::OnceLock};
+
+use jrsonnet_std_sig::FNS;
+
+/// Documentation for a stdlib function.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct StdlibDoc {
+	/// Function name (without `std.` prefix).
+	pub name: &'static str,
+	/// Function signature without the closing `)`, e.g. `(arr, func` — `to_markdown` appends it.
+	pub signature: String,
+	/// Short description.
+	pub description: &'static str,
+	/// Example usage (optional).
+	pub example: Option<&'static str>,
+}
+
+impl StdlibDoc {
+	/// Format as markdown for hover display.
+	#[must_use]
+	pub fn to_markdown(&self) -> String {
+		let mut md = format!("```jsonnet\nstd.{}{})\n```\n\n", self.name, self.signature);
+		md.push_str(self.description);
+		if let Some(example) = self.example {
+			md.push_str("\n\n**Example:**\n```jsonnet\n");
+			md.push_str(example);
+			md.push_str("\n```");
+		}
+		md
+	}
+}
+
+/// Get documentation for a stdlib function by name.
+pub fn get_stdlib_doc(name: &str) -> Option<&'static StdlibDoc> {
+	STDLIB_DOCS.get_or_init(init_stdlib_docs).get(name)
+}
+
+/// Get all stdlib function docs for completion.
+pub fn get_all_stdlib_docs() -> impl Iterator<Item = &'static StdlibDoc> {
+	let docs = STDLIB_DOCS.get_or_init(init_stdlib_docs);
+	docs.values()
+}
+
+static STDLIB_DOCS: OnceLock<HashMap<&'static str, StdlibDoc>> = OnceLock::new();
+
+/// Generate signature string from function parameters.
+fn build_signature(spec_fn: &jrsonnet_std_sig::StdFn) -> String { + let params: Vec = spec_fn + .params + .iter() + .map(|p| { + if p.has_default { + format!("{}=...", p.name) + } else { + p.name.to_string() + } + }) + .collect(); + + format!("({}", params.join(", ")) +} + +fn init_stdlib_docs() -> HashMap<&'static str, StdlibDoc> { + FNS.iter() + .map(|spec_fn| { + let doc = StdlibDoc { + name: spec_fn.name, + signature: build_signature(spec_fn), + description: spec_fn.doc, + example: spec_fn.example, + }; + (spec_fn.name, doc) + }) + .collect() +} + +/// Initialize the stdlib docs (called lazily). +pub fn ensure_initialized() { + STDLIB_DOCS.get_or_init(init_stdlib_docs); +} + +#[cfg(test)] +mod tests { + use indoc::indoc; + + use super::*; + + #[test] + fn test_get_stdlib_doc_map() { + ensure_initialized(); + assert_eq!( + get_stdlib_doc("map").unwrap(), + &StdlibDoc { + name: "map", + signature: "(func, arr".to_string(), + description: "Applies `func` to each element of `arr`.", + example: Some("std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]"), + } + ); + } + + #[test] + fn test_get_stdlib_doc_sort_with_optional() { + ensure_initialized(); + assert_eq!( + get_stdlib_doc("sort").unwrap(), + &StdlibDoc { + name: "sort", + signature: "(arr, keyF=...".to_string(), + description: "Sorts array, optionally by key function.", + example: Some("std.sort([3,1,2]) // [1, 2, 3]"), + } + ); + } + + #[test] + fn test_get_stdlib_doc_format_variadic() { + ensure_initialized(); + assert_eq!( + get_stdlib_doc("format").unwrap(), + &StdlibDoc { + name: "format", + signature: "(fmt".to_string(), + description: "Printf-style formatting.", + example: Some(r#"std.format("Hello %s", ["world"]) // "Hello world""#), + } + ); + } + + #[test] + fn test_to_markdown_with_example() { + ensure_initialized(); + let doc = get_stdlib_doc("map").unwrap(); + assert_eq!( + doc.to_markdown(), + indoc! {r" + ```jsonnet + std.map(func, arr) + ``` + + Applies `func` to each element of `arr`. 
+ + **Example:** + ```jsonnet + std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6] + ```"} + ); + } + + #[test] + fn test_to_markdown_no_example() { + ensure_initialized(); + let doc = get_stdlib_doc("mapWithIndex").unwrap(); + assert_eq!( + doc.to_markdown(), + indoc! {" + ```jsonnet + std.mapWithIndex(func, arr) + ``` + + Like `map`, but `func` takes `(index, element)`."} + ); + } + + #[test] + fn test_unknown_function() { + ensure_initialized(); + assert!(get_stdlib_doc("unknownFunction").is_none()); + } + + #[test] + fn test_all_spec_functions_have_docs() { + ensure_initialized(); + for spec_fn in FNS { + let doc = get_stdlib_doc(spec_fn.name); + assert!(doc.is_some(), "Missing doc for {}", spec_fn.name); + + let doc = doc.unwrap(); + assert_eq!(doc.name, spec_fn.name); + assert_eq!(doc.description, spec_fn.doc); + assert_eq!(doc.example, spec_fn.example); + } + } +} diff --git a/crates/jrsonnet-lsp-stdlib/src/lib.rs b/crates/jrsonnet-lsp-stdlib/src/lib.rs new file mode 100644 index 00000000..ebc9a37c --- /dev/null +++ b/crates/jrsonnet-lsp-stdlib/src/lib.rs @@ -0,0 +1,14 @@ +//! Standard library documentation and signatures for Jsonnet LSP. +//! +//! This crate provides: +//! - Type signatures for standard library functions +//! - Documentation strings for hover and completion + +mod docs; +mod signatures; + +pub use docs::{ensure_initialized, get_all_stdlib_docs, get_stdlib_doc, StdlibDoc}; +pub use signatures::{ + get_all_stdlib_signatures, get_stdlib_func_data, get_stdlib_func_ty, get_stdlib_signature, + import_stdlib_func_to_mut_store, import_ty_from_stdlib, stdlib_store, StdlibSignature, +}; diff --git a/crates/jrsonnet-lsp-stdlib/src/signatures.rs b/crates/jrsonnet-lsp-stdlib/src/signatures.rs new file mode 100644 index 00000000..43d9c19e --- /dev/null +++ b/crates/jrsonnet-lsp-stdlib/src/signatures.rs @@ -0,0 +1,466 @@ +//! Standard library function signatures for type checking. +//! +//! 
Provides parameter and return type information for stdlib functions. +//! Uses `Ty` and `FunctionData` for efficient interned type representation. +//! +//! Type signatures are generated from the `jrsonnet-std-sig` crate spec. + +use std::{collections::HashMap, sync::OnceLock}; + +use jrsonnet_lsp_types::{ + FieldDefInterned, FunctionData, MutStore, ObjectData, ParamInterned, + ReturnSpec as LspReturnSpec, Ty, TyConstraints, TyData, TyStore, +}; +use jrsonnet_std_sig::{ParamType, ReturnSpec as SigReturnSpec, FNS}; + +/// Combined storage for stdlib types and signatures. +struct StdlibData { + /// Store for interned stdlib types. + store: TyStore, + /// Map from function name to signature. + signatures: HashMap<&'static str, StdlibSignature>, +} + +/// Signature for a stdlib function. +#[derive(Debug, Clone)] +pub struct StdlibSignature { + /// Function name (without `std.` prefix). + pub name: &'static str, + /// The function type as interned Ty (references `STDLIB_DATA.store`). + pub func_ty: Ty, +} + +impl StdlibSignature { + /// Count of required parameters. + #[must_use] + pub fn required_count(&self) -> usize { + match *stdlib_store().get(self.func_ty) { + TyData::Function(ref f) => f.required_count(), + _ => 0, + } + } + + /// Total parameter count. + #[must_use] + pub fn total_count(&self) -> usize { + match *stdlib_store().get(self.func_ty) { + TyData::Function(ref f) => f.params.len(), + _ => 0, + } + } + + /// Whether the function accepts variadic arguments. + #[must_use] + pub fn variadic(&self) -> bool { + match *stdlib_store().get(self.func_ty) { + TyData::Function(ref f) => f.variadic, + _ => false, + } + } + + /// Get the function data from the global store as an owned copy. + #[must_use] + pub fn func_data(&self) -> Option { + match *stdlib_store().get(self.func_ty) { + TyData::Function(ref f) => Some(f.clone()), + _ => None, + } + } +} + +static STDLIB_DATA: OnceLock = OnceLock::new(); + +/// Get the global stdlib type store. 
+pub fn stdlib_store() -> &'static TyStore {
+	&STDLIB_DATA.get_or_init(init_stdlib_data).store
+}
+
+/// Get the signature for a stdlib function by name.
+///
+/// Initializes the signature table on first use.
+pub fn get_stdlib_signature(name: &str) -> Option<&'static StdlibSignature> {
+	STDLIB_DATA
+		.get_or_init(init_stdlib_data)
+		.signatures
+		.get(name)
+}
+
+/// Get the function type (as Ty) for a stdlib function by name.
+#[must_use]
+pub fn get_stdlib_func_ty(name: &str) -> Option<Ty> {
+	get_stdlib_signature(name).map(|s| s.func_ty)
+}
+
+/// Get the function data for a stdlib function by name.
+#[must_use]
+pub fn get_stdlib_func_data(name: &str) -> Option<FunctionData> {
+	get_stdlib_signature(name).and_then(StdlibSignature::func_data)
+}
+
+/// Get all stdlib signatures.
+///
+/// Initializes the signature table on first use.
+pub fn get_all_stdlib_signatures() -> impl Iterator<Item = &'static StdlibSignature> {
+	STDLIB_DATA
+		.get_or_init(init_stdlib_data)
+		.signatures
+		.values()
+}
+
+/// Initialize the stdlib signatures (called lazily).
+pub fn ensure_initialized() {
+	STDLIB_DATA.get_or_init(init_stdlib_data);
+}
+
+/// Convert a spec `ParamType` to an interned Ty.
+fn param_type_to_ty(store: &mut TyStore, pt: ParamType) -> Ty { + match pt { + ParamType::Any => Ty::ANY, + ParamType::Null => Ty::NULL, + ParamType::Bool => Ty::BOOL, + ParamType::Number => Ty::NUMBER, + ParamType::String => Ty::STRING, + ParamType::Char => Ty::CHAR, + ParamType::Array => store.array(Ty::ANY), + ParamType::ArrayNumber => store.array(Ty::NUMBER), + ParamType::ArrayString => store.array(Ty::STRING), + ParamType::ArrayChar => store.array(Ty::CHAR), + ParamType::ArrayBool => store.array(Ty::BOOL), + ParamType::Object => store.object_any(), + ParamType::Function => store.function_any(), + ParamType::StringOrArray => { + let arr = store.array(Ty::ANY); + store.union(vec![Ty::STRING, arr]) + } + ParamType::Lengthable => { + let arr = store.array(Ty::ANY); + let obj = store.object_any(); + let func = store.function_any(); + store.union(vec![arr, Ty::STRING, obj, func]) + } + } +} + +/// Convert a spec `ReturnSpec` to an LSP `ReturnSpec`. +fn convert_return_spec(store: &mut TyStore, rs: SigReturnSpec) -> LspReturnSpec { + match rs { + SigReturnSpec::Fixed(pt) => LspReturnSpec::Fixed(param_type_to_ty(store, pt)), + SigReturnSpec::SameAsArg(idx) => LspReturnSpec::SameAsArg(idx), + SigReturnSpec::NonNegative => LspReturnSpec::NonNegative, + SigReturnSpec::ArrayOfFuncReturn(idx) => LspReturnSpec::ArrayOfFuncReturn(idx), + SigReturnSpec::ArrayWithSameElements(idx) => LspReturnSpec::ArrayWithSameElements(idx), + SigReturnSpec::SetWithSameElements(idx) => LspReturnSpec::SetWithSameElements(idx), + SigReturnSpec::ObjectValuesType(idx) => LspReturnSpec::ObjectValuesType(idx), + SigReturnSpec::FlatMapResult(idx) => LspReturnSpec::FlatMapResult(idx), + SigReturnSpec::Any => LspReturnSpec::default(), + } +} + +fn init_stdlib_data() -> StdlibData { + let mut store = TyStore::new(); + + // Generate signatures from the spec + let sigs: Vec = FNS + .iter() + .map(|spec_fn| { + // Convert parameters + let params: Vec = spec_fn + .params + .iter() + .map(|p| ParamInterned 
{ + name: p.name.to_string(), + ty: param_type_to_ty(&mut store, p.ty), + has_default: p.has_default, + }) + .collect(); + + // Convert return spec + let return_spec = convert_return_spec(&mut store, spec_fn.return_spec); + + // Create function type + let func_data = FunctionData { + params, + return_spec, + variadic: spec_fn.variadic, + }; + let func_ty = store.intern(TyData::Function(func_data)); + + StdlibSignature { + name: spec_fn.name, + func_ty, + } + }) + .collect(); + + let signatures: HashMap<&'static str, StdlibSignature> = + sigs.into_iter().map(|s| (s.name, s)).collect(); + + StdlibData { store, signatures } +} + +/// Look up a stdlib function type by name and import it into a `MutStore`. +/// +/// Returns the function type if found, interned into the local store. +pub fn import_stdlib_func_to_mut_store(store: &mut MutStore, name: &str) -> Option { + let func_ty = get_stdlib_func_ty(name)?; + Some(import_ty_from_stdlib(store, func_ty)) +} + +/// Import a type from the stdlib store into a `MutStore`. +pub fn import_ty_from_stdlib(store: &mut MutStore, ty: Ty) -> Ty { + // Well-known constants are the same in all stores + if ty.is_well_known() { + return ty; + } + + let source = stdlib_store(); + match *source.get(ty) { + TyData::Array { elem, .. 
} => { + let imported_elem = import_ty_from_stdlib(store, elem); + store.array(imported_elem) + } + TyData::Tuple { ref elems } => { + let imported_elems: Vec<_> = elems + .iter() + .map(|&e| import_ty_from_stdlib(store, e)) + .collect(); + store.tuple(imported_elems) + } + TyData::Union(ref variants) => { + let imported_variants: Vec<_> = variants + .iter() + .map(|&v| import_ty_from_stdlib(store, v)) + .collect(); + store.union(imported_variants) + } + TyData::Object(ref obj) => { + let imported_fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + ( + name.clone(), + FieldDefInterned { + ty: import_ty_from_stdlib(store, field.ty), + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + store.object(ObjectData { + fields: imported_fields, + has_unknown: obj.has_unknown, + }) + } + TyData::AttrsOf { value } => { + let imported_value = import_ty_from_stdlib(store, value); + store.attrs_of(imported_value) + } + TyData::Function(ref func) => { + let imported_params: Vec<_> = func + .params + .iter() + .map(|p| ParamInterned { + name: p.name.clone(), + ty: import_ty_from_stdlib(store, p.ty), + has_default: p.has_default, + }) + .collect(); + let imported_return = match &func.return_spec { + LspReturnSpec::Fixed(ret) => { + LspReturnSpec::Fixed(import_ty_from_stdlib(store, *ret)) + } + other => other.clone(), + }; + store.function(FunctionData { + params: imported_params, + return_spec: imported_return, + variadic: func.variadic, + }) + } + TyData::Sum(ref variants) => { + let imported_variants: Vec<_> = variants + .iter() + .map(|&v| import_ty_from_stdlib(store, v)) + .collect(); + store.sum(imported_variants) + } + TyData::BoundedNumber(bounds) => store.bounded_number(bounds), + TyData::LiteralString(ref s) => store.literal_string(s.clone()), + TyData::TypeVar { + id, + ref constraints, + } => { + let imported_upper = constraints + .upper_bound + .map(|b| import_ty_from_stdlib(store, b)); + store.type_var( + id, + 
TyConstraints { + must_be_indexable: constraints.must_be_indexable, + must_support_fields: constraints.must_support_fields, + must_be_callable: constraints.must_be_callable, + upper_bound: imported_upper, + }, + ) + } + // Primitives are the same everywhere + TyData::Any + | TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::String + | TyData::Char => ty, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_get_stdlib_signature_map() { + ensure_initialized(); + let sig = get_stdlib_signature("map").unwrap(); + assert_eq!(sig.name, "map"); + + let func_data = sig.func_data().unwrap(); + // Extract (name, has_default) for structural comparison + let params: Vec<_> = func_data + .params + .iter() + .map(|p| (p.name.as_str(), p.has_default)) + .collect(); + assert_eq!(params, vec![("func", false), ("arr", false)]); + assert_eq!( + (func_data.variadic, &func_data.return_spec), + (false, &LspReturnSpec::ArrayOfFuncReturn(0)) + ); + } + + #[test] + fn test_get_stdlib_signature_sort_with_optional() { + ensure_initialized(); + let sig = get_stdlib_signature("sort").unwrap(); + assert_eq!(sig.name, "sort"); + + let func_data = sig.func_data().unwrap(); + let params: Vec<_> = func_data + .params + .iter() + .map(|p| (p.name.as_str(), p.has_default)) + .collect(); + assert_eq!(params, vec![("arr", false), ("keyF", true)]); + assert_eq!( + (func_data.variadic, &func_data.return_spec), + (false, &LspReturnSpec::SameAsArg(0)) + ); + } + + #[test] + fn test_get_stdlib_signature_format_variadic() { + ensure_initialized(); + let sig = get_stdlib_signature("format").unwrap(); + assert_eq!(sig.name, "format"); + + let func_data = sig.func_data().unwrap(); + let params: Vec<_> = func_data + .params + .iter() + .map(|p| (p.name.as_str(), p.has_default)) + .collect(); + assert_eq!(params, vec![("fmt", false)]); + assert_eq!( + (func_data.variadic, &func_data.return_spec), + (true, 
&LspReturnSpec::Fixed(Ty::STRING)) + ); + } + + #[test] + fn test_get_stdlib_signature_length() { + ensure_initialized(); + let sig = get_stdlib_signature("length").unwrap(); + assert_eq!(sig.name, "length"); + + let func_data = sig.func_data().unwrap(); + let params: Vec<_> = func_data + .params + .iter() + .map(|p| (p.name.as_str(), p.has_default)) + .collect(); + assert_eq!(params, vec![("x", false)]); + assert_eq!( + (func_data.variadic, &func_data.return_spec), + (false, &LspReturnSpec::NonNegative) + ); + + // Verify the parameter is a union type + let param_ty = func_data.params[0].ty; + assert!(matches!(*stdlib_store().get(param_ty), TyData::Union(_))); + } + + #[test] + fn test_unknown_function() { + ensure_initialized(); + assert!(get_stdlib_signature("unknownFunction").is_none()); + } + + #[test] + fn test_required_count() { + ensure_initialized(); + + // sort has 1 required, 1 optional + let sort = get_stdlib_signature("sort").unwrap(); + assert_eq!(sort.required_count(), 1); + assert_eq!(sort.total_count(), 2); + + // map has 2 required + let map = get_stdlib_signature("map").unwrap(); + assert_eq!(map.required_count(), 2); + assert_eq!(map.total_count(), 2); + } + + #[test] + fn test_set_functions_return_set_type() { + ensure_initialized(); + + // std.set returns a set + let set = get_stdlib_signature("set").unwrap(); + let func_data = set.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.uniq returns a set + let uniq = get_stdlib_signature("uniq").unwrap(); + let func_data = uniq.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.setUnion returns a set + let set_union = get_stdlib_signature("setUnion").unwrap(); + let func_data = set_union.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.setInter returns a set + let set_inter = get_stdlib_signature("setInter").unwrap(); + let 
func_data = set_inter.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.setDiff returns a set + let set_diff = get_stdlib_signature("setDiff").unwrap(); + let func_data = set_diff.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.setMember returns bool (not a set) + let set_member = get_stdlib_signature("setMember").unwrap(); + let func_data = set_member.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::Fixed(Ty::BOOL)); + } + + #[test] + fn test_all_spec_functions_have_signatures() { + ensure_initialized(); + for spec_fn in FNS { + let sig = get_stdlib_signature(spec_fn.name); + assert!(sig.is_some(), "Missing signature for {}", spec_fn.name); + } + } +} diff --git a/crates/jrsonnet-lsp-types/Cargo.toml b/crates/jrsonnet-lsp-types/Cargo.toml new file mode 100644 index 00000000..19d3ce27 --- /dev/null +++ b/crates/jrsonnet-lsp-types/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "jrsonnet-lsp-types" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Type system for jrsonnet LSP" + +[dependencies] +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +rustc-hash.workspace = true +thiserror.workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" +rstest = "0.23" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-types/src/display.rs b/crates/jrsonnet-lsp-types/src/display.rs new file mode 100644 index 00000000..dc67c2ba --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/display.rs @@ -0,0 +1,427 @@ +//! Type display formatting with configurable verbosity. +//! +//! Provides `DisplayContext` for controlling how types are formatted: +//! - Compact mode: elide long lists, abbreviate objects +//! 
- Detailed mode: show all fields, full signatures + +use crate::{NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, TypeStoreOps}; + +/// Display style for types. +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +pub enum DisplayStyle { + /// Compact display - elide long lists, abbreviate objects. + #[default] + Compact, + /// Detailed display - show all fields, full signatures. + Detailed, +} + +/// Context for displaying types with configurable verbosity. +#[derive(Clone)] +pub struct DisplayContext<'a, S: TypeStoreOps> { + /// Display style. + pub style: DisplayStyle, + /// Type store for resolving types. + store: &'a S, + /// Maximum depth for nested types (to prevent infinite recursion). + pub max_depth: usize, + /// Maximum items to show in arrays/objects before eliding. + pub max_items: usize, + /// Maximum union members to show before eliding. + pub max_union_members: usize, +} + +impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { + /// Create a compact display context. + pub fn compact(store: &'a S) -> Self { + Self { + style: DisplayStyle::Compact, + store, + max_depth: 3, + max_items: 3, + max_union_members: 3, + } + } + + /// Create a detailed display context. + pub fn detailed(store: &'a S) -> Self { + Self { + style: DisplayStyle::Detailed, + store, + max_depth: 10, + max_items: 20, + max_union_members: 10, + } + } + + /// Format a type using this context. 
+	#[must_use]
+	pub fn format(&self, ty: Ty) -> String {
+		self.format_impl(ty, 0)
+	}
+
+	fn format_impl(&self, ty: Ty, depth: usize) -> String {
+		if depth > self.max_depth {
+			return "...".to_string();
+		}
+
+		match self.store.get_data(ty) {
+			TyData::Any => "any".to_string(),
+			TyData::Never => "never".to_string(),
+			TyData::Null => "null".to_string(),
+			TyData::Bool => "boolean".to_string(),
+			TyData::True => "true".to_string(),
+			TyData::False => "false".to_string(),
+			TyData::Number => "number".to_string(),
+			TyData::BoundedNumber(bounds) => self.format_bounded_number(&bounds),
+			TyData::String => "string".to_string(),
+			TyData::Char => "char".to_string(),
+			TyData::LiteralString(s) => {
+				// Truncate on char boundaries: byte-slicing (`&s[..17]`) panics
+				// mid-codepoint on multi-byte UTF-8 literals.
+				if self.style == DisplayStyle::Compact && s.chars().count() > 20 {
+					let prefix: String = s.chars().take(17).collect();
+					format!("\"{prefix}...\"")
+				} else {
+					format!("\"{s}\"")
+				}
+			}
+			TyData::Array { elem, is_set } => {
+				let elem_str = self.format_impl(elem, depth + 1);
+				if is_set {
+					format!("set<{elem_str}>")
+				} else {
+					format!("array<{elem_str}>")
+				}
+			}
+			TyData::Tuple { elems } => self.format_tuple(&elems, depth),
+			TyData::Object(obj) => self.format_object(&obj, depth),
+			TyData::AttrsOf { value } => {
+				format!("object<{}>", self.format_impl(value, depth + 1))
+			}
+			TyData::Function(func) => {
+				let ret = match &func.return_spec {
+					ReturnSpec::Fixed(ret) => *ret,
+					_ => Ty::ANY, // For complex return specs, show "any"
+				};
+				self.format_function(&func.params, ret, depth)
+			}
+			TyData::Union(types) => self.format_union(&types, depth),
+			TyData::Sum(types) => self.format_sum(&types, depth),
+			TyData::TypeVar { id, constraints } => {
+				let mut s = id.to_string();
+				if !constraints.is_empty() && self.style == DisplayStyle::Detailed {
+					let mut parts: Vec<String> = Vec::new();
+					if constraints.must_be_indexable {
+						parts.push("indexable".to_string());
+					}
+					if constraints.must_support_fields {
+						parts.push("object-like".to_string());
+					}
+					if constraints.must_be_callable {
+						parts.push("callable".to_string());
+					}
+					if let
Some(bound) = constraints.upper_bound { + parts.push(format!("<: {}", self.format_impl(bound, depth + 1))); + } + if !parts.is_empty() { + s.push_str(" where "); + s.push_str(&parts.join(", ")); + } + } + s + } + } + } + + fn format_bounded_number(&self, bounds: &NumBounds) -> String { + match (bounds.min_f64(), bounds.max_f64()) { + (None, None) => "number".to_string(), + (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { + if lo.fract() == 0.0 { + format!("{lo:.0}") + } else { + format!("{lo}") + } + } + (Some(lo), Some(hi)) => { + if self.style == DisplayStyle::Compact { + "number".to_string() + } else { + format!("number[{lo}..{hi}]") + } + } + (Some(lo), None) => { + if self.style == DisplayStyle::Compact { + "number".to_string() + } else { + format!("number[{lo}..Inf]") + } + } + (None, Some(hi)) => { + if self.style == DisplayStyle::Compact { + "number".to_string() + } else { + format!("number[-Inf..{hi}]") + } + } + } + } + + fn format_tuple(&self, elems: &[Ty], depth: usize) -> String { + if elems.is_empty() { + return "[]".to_string(); + } + + let show_count = if self.style == DisplayStyle::Compact { + self.max_items.min(elems.len()) + } else { + elems.len() + }; + + let elem_strs: Vec = elems + .iter() + .take(show_count) + .map(|&t| self.format_impl(t, depth + 1)) + .collect(); + + let elided = if elems.len() > show_count { + format!(", ...{} more", elems.len() - show_count) + } else { + String::new() + }; + + format!("[{}{}]", elem_strs.join(", "), elided) + } + + fn format_object(&self, obj: &ObjectData, depth: usize) -> String { + if obj.fields.is_empty() && !obj.has_unknown { + return "{}".to_string(); + } + + if obj.has_unknown && obj.fields.is_empty() { + return "object".to_string(); + } + + // Sort fields by name for consistent output + let mut fields: Vec<_> = obj.fields.iter().collect(); + fields.sort_by(|(a, _), (b, _)| a.cmp(b)); + + let show_count = if self.style == DisplayStyle::Compact { + self.max_items.min(fields.len()) + } else 
{ + fields.len() + }; + + let field_strs: Vec = fields + .iter() + .take(show_count) + .map(|(name, field)| { + if self.style == DisplayStyle::Detailed { + format!("{}: {}", name, self.format_impl(field.ty, depth + 1)) + } else { + name.to_string() + } + }) + .collect(); + + let elided = if fields.len() > show_count { + format!(", ...{} more", fields.len() - show_count) + } else { + String::new() + }; + + let suffix = if obj.has_unknown { ", ..." } else { "" }; + + format!("{{ {}{}{} }}", field_strs.join(", "), elided, suffix) + } + + fn format_function(&self, params: &[ParamInterned], ret: Ty, depth: usize) -> String { + let param_strs: Vec = if self.style == DisplayStyle::Detailed { + params + .iter() + .map(|p| { + let ty_str = self.format_impl(p.ty, depth + 1); + if p.has_default { + format!("{}?: {}", p.name, ty_str) + } else { + format!("{}: {}", p.name, ty_str) + } + }) + .collect() + } else { + params.iter().map(|p| p.name.to_string()).collect() + }; + + if self.style == DisplayStyle::Detailed && !ret.is_any() { + format!( + "({}) -> {}", + param_strs.join(", "), + self.format_impl(ret, depth + 1) + ) + } else { + format!("function({})", param_strs.join(", ")) + } + } + + fn format_union(&self, types: &[Ty], depth: usize) -> String { + if types.is_empty() { + return "never".to_string(); + } + + let show_count = if self.style == DisplayStyle::Compact { + self.max_union_members.min(types.len()) + } else { + types.len() + }; + + let parts: Vec = types + .iter() + .take(show_count) + .map(|&t| self.format_impl(t, depth + 1)) + .collect(); + + let elided = if types.len() > show_count { + format!(" | ...{} more", types.len() - show_count) + } else { + String::new() + }; + + format!("{}{}", parts.join(" | "), elided) + } + + fn format_sum(&self, types: &[Ty], depth: usize) -> String { + let parts: Vec = types + .iter() + .map(|&t| self.format_impl(t, depth + 1)) + .collect(); + parts.join(" & ") + } +} + +/// Wrapper for displaying a type with a context using 
`std::fmt`. +pub struct DisplayTy<'a, S: TypeStoreOps> { + ty: Ty, + cx: &'a DisplayContext<'a, S>, +} + +impl<'a, S: TypeStoreOps> DisplayTy<'a, S> { + /// Create a new display wrapper. + #[must_use] + pub fn new(ty: Ty, cx: &'a DisplayContext<'a, S>) -> Self { + Self { ty, cx } + } +} + +impl std::fmt::Display for DisplayTy<'_, S> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.cx.format(self.ty)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{FieldDefInterned, FieldVis, FunctionData, TyStore}; + + #[test] + fn test_compact_vs_detailed_object() { + let mut store = TyStore::new(); + + // Create an object with many fields + let fields: Vec<_> = (0..10) + .map(|i| { + ( + format!("field{i}"), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + ) + }) + .collect(); + let obj_ty = store.object(ObjectData { + fields, + has_unknown: false, + }); + + let compact = DisplayContext::compact(&store); + let detailed = DisplayContext::detailed(&store); + + let compact_str = compact.format(obj_ty); + let detailed_str = detailed.format(obj_ty); + + // Compact should elide fields + assert!(compact_str.contains("...")); + assert!(compact_str.contains("more")); + + // Detailed should show all + assert!(detailed_str.contains("field9")); + } + + #[test] + fn test_function_display() { + let mut store = TyStore::new(); + + let func_ty = store.function(FunctionData { + params: vec![ + ParamInterned { + name: "x".into(), + ty: Ty::NUMBER, + has_default: false, + }, + ParamInterned { + name: "y".into(), + ty: Ty::STRING, + has_default: true, + }, + ], + return_spec: ReturnSpec::Fixed(Ty::BOOL), + variadic: false, + }); + + let compact = DisplayContext::compact(&store); + let detailed = DisplayContext::detailed(&store); + + let compact_str = compact.format(func_ty); + let detailed_str = detailed.format(func_ty); + + // Compact: function(x, y) + assert_eq!(compact_str, "function(x, 
y)"); + + // Detailed: (x: number, y?: string) -> boolean + assert!(detailed_str.contains("number")); + assert!(detailed_str.contains("y?:")); + assert!(detailed_str.contains("->")); + } + + #[test] + fn test_max_depth() { + let mut store = TyStore::new(); + + // Create deeply nested array: array>> + let mut ty = Ty::NUMBER; + for _ in 0..20 { + ty = store.array(ty); + } + + let compact = DisplayContext::compact(&store); + let result = compact.format(ty); + + // Should hit max depth and show "..." + assert!(result.contains("...")); + } + + #[test] + fn test_union_elision() { + let mut store = TyStore::new(); + + // Create a union with many members + let members: Vec = (0..10).map(|_| Ty::STRING).collect(); + let union_ty = store.union(members); + + let compact = DisplayContext::compact(&store); + let result = compact.format(union_ty); + + // Compact should elide union members + assert!(result.contains("more") || result.matches(" | ").count() < 9); + } +} diff --git a/crates/jrsonnet-lsp-types/src/global_store.rs b/crates/jrsonnet-lsp-types/src/global_store.rs new file mode 100644 index 00000000..cdb6f61f --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/global_store.rs @@ -0,0 +1,216 @@ +//! Global type store - shared across all files in a session. +//! +//! This store is thread-safe and holds all interned types that persist +//! across file analyses. Types from any file can reference types in +//! this store, enabling cross-file type sharing. + +use std::sync::RwLock; + +use rustc_hash::FxHashMap; + +use crate::store::{Ty, TyData}; + +static ANY_TY_DATA: TyData = TyData::Any; + +fn to_u32(value: usize) -> Option { + u32::try_from(value).ok() +} + +/// Internal storage implementation shared between global and local stores. +#[derive(Debug, Clone)] +pub(crate) struct TyStoreInner { + /// Type data indexed by Ty ID. + pub(crate) data: Vec, + /// Reverse mapping for deduplication. 
+ pub(crate) dedup: FxHashMap, +} + +impl TyStoreInner { + /// Create a new store with well-known types pre-populated. + pub(crate) fn with_builtins() -> Self { + let mut inner = Self { + data: Vec::with_capacity(64), + dedup: FxHashMap::default(), + }; + inner.init_builtins(); + inner + } + + /// Initialize built-in well-known types. + fn init_builtins(&mut self) { + // Must match the order of Ty constants! + let builtins = [ + TyData::Any, // 0 = ANY + TyData::Never, // 1 = NEVER + TyData::Null, // 2 = NULL + TyData::Bool, // 3 = BOOL + TyData::True, // 4 = TRUE + TyData::False, // 5 = FALSE + TyData::Number, // 6 = NUMBER + TyData::String, // 7 = STRING + TyData::Char, // 8 = CHAR + // Padding to RESERVED_COUNT + TyData::Any, // 9 - reserved + TyData::Any, // 10 - reserved + TyData::Any, // 11 - reserved + TyData::Any, // 12 - reserved + TyData::Any, // 13 - reserved + TyData::Any, // 14 - reserved + TyData::Any, // 15 - reserved + ]; + + for (data, raw_id) in builtins.into_iter().zip(0_u32..) { + let ty = Ty::from_raw(raw_id); + self.data.push(data.clone()); + // Only dedup the non-padding entries + if raw_id < 9 { + self.dedup.insert(data, ty); + } + } + + debug_assert_eq!(self.data.len(), Ty::RESERVED_COUNT as usize); + } + + /// Get type data by index. + #[inline] + pub(crate) fn get_data(&self, index: u32) -> &TyData { + let idx = usize::try_from(index).ok(); + idx.and_then(|i| self.data.get(i)) + .or_else(|| self.data.first()) + .unwrap_or(&ANY_TY_DATA) + } + + /// Get the number of types in this store. + pub(crate) fn len(&self) -> usize { + self.data.len() + } + + /// Intern a type, returning existing ID if already present. + /// The `make_ty` function creates the Ty from the raw index. 
+ pub(crate) fn intern(&mut self, data: TyData, make_ty: impl Fn(u32) -> Ty) -> Ty { + // Check if already interned + if let Some(&existing) = self.dedup.get(&data) { + return existing; + } + + // Intern new type + let Some(raw_id) = to_u32(self.data.len()) else { + return Ty::ANY; + }; + let id = make_ty(raw_id); + self.data.push(data.clone()); + self.dedup.insert(data, id); + id + } +} + +/// Global type store - shared across all files in a session. +/// +/// Thread-safe via `RwLock`. All persistent types live here. +/// Types interned here have `Ty` values without the `LOCAL_BIT` set. +#[derive(Debug)] +pub struct GlobalTyStore { + inner: RwLock, +} + +impl GlobalTyStore { + /// Create a new global store with built-in types. + #[must_use] + pub fn new() -> Self { + Self { + inner: RwLock::new(TyStoreInner::with_builtins()), + } + } + + /// Get read access to the inner store. + pub(crate) fn read(&self) -> std::sync::RwLockReadGuard<'_, TyStoreInner> { + self.inner + .read() + .unwrap_or_else(std::sync::PoisonError::into_inner) + } + + /// Get write access to the inner store. + pub(crate) fn write(&self) -> std::sync::RwLockWriteGuard<'_, TyStoreInner> { + self.inner + .write() + .unwrap_or_else(std::sync::PoisonError::into_inner) + } + + /// Get type data for a global Ty. + pub fn get_data(&self, ty: Ty) -> TyData { + if ty.is_local() { + return TyData::Any; + } + self.read().get_data(ty.raw_index()).clone() + } + + /// Intern a type into the global store. + /// + /// Returns an existing type if the data is already interned. + pub fn intern(&self, data: TyData) -> Ty { + // Fast path for well-known types + if let Some(ty) = Ty::well_known_for_data(&data) { + return ty; + } + + let mut inner = self.write(); + inner.intern(data, Ty::from_raw) + } + + /// Get the number of types in the global store. + pub fn len(&self) -> usize { + self.read().len() + } + + /// Check if empty (never true after init). 
+ pub fn is_empty(&self) -> bool { + self.read().len() == 0 + } +} + +impl Default for GlobalTyStore { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_global_store_new() { + let store = GlobalTyStore::new(); + // Should have built-in types + assert!(store.len() >= Ty::RESERVED_COUNT as usize); + } + + #[test] + fn test_global_store_intern_dedup() { + let store = GlobalTyStore::new(); + + let arr1 = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let arr2 = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + assert_eq!(arr1, arr2); + assert!(!arr1.is_local()); + } + + #[test] + fn test_global_store_well_known() { + let store = GlobalTyStore::new(); + + // Well-known types should return the constant + let any = store.intern(TyData::Any); + assert_eq!(any, Ty::ANY); + + let num = store.intern(TyData::Number); + assert_eq!(num, Ty::NUMBER); + } +} diff --git a/crates/jrsonnet-lsp-types/src/lib.rs b/crates/jrsonnet-lsp-types/src/lib.rs new file mode 100644 index 00000000..09a45ec8 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/lib.rs @@ -0,0 +1,46 @@ +//! Jsonnet type system definitions. +//! +//! This crate provides the type definitions and operations used throughout +//! the Jsonnet LSP for type inference, type checking, and flow typing. +//! +//! The type system uses a simplified model where: +//! - `Any` is the top type (all values) +//! - `Never` is the bottom type (no values, unreachable code) +//! - Union types represent values that could be one of several types +//! +//! # Module Organization +//! +//! - [`store`]: Type storage with interned `Ty` references for efficient representation +//! - [`global_store`]: Thread-safe global type store shared across all files +//! - [`local_store`]: Per-file local type store for analysis +//! - [`mut_store`]: Mutable store combining global and local for analysis +//! 
- [`operations`]: Type operations for checking and combining types +//! - [`unification`]: Type unification with variance handling +//! - [`display`]: Type display formatting with configurable verbosity + +pub mod display; +pub mod global_store; +pub mod local_store; +pub mod mut_store; +mod operations; +pub mod semantic_tokens; +pub mod store; +pub mod subst; +pub mod unification; + +pub use display::{DisplayContext, DisplayStyle, DisplayTy}; +pub use global_store::GlobalTyStore; +pub use local_store::LocalTyStore; +pub use mut_store::MutStore; +pub use operations::*; +pub use semantic_tokens::{SemanticTokenModifierName, SemanticTokenTypeName}; +pub use store::{ + reset_store, with_store, FieldDefInterned, FieldVis, FunctionData, GlobalTy, NotGlobalTy, + NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, TyStore, + TySubstitution, TyVarId, TypeStoreOps, +}; +pub use subst::TySubst; +pub use unification::{ + is_subtype_ty, types_equivalent_ty, unify_ty, PathElement, UnifyError, UnifyReason, + UnifyResult, Variance, +}; diff --git a/crates/jrsonnet-lsp-types/src/local_store.rs b/crates/jrsonnet-lsp-types/src/local_store.rs new file mode 100644 index 00000000..3c350e6b --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/local_store.rs @@ -0,0 +1,157 @@ +//! Local type store - per-file temporary storage during analysis. +//! +//! Local types are created during file analysis and then merged into +//! the global store when analysis completes. Local types have the +//! `LOCAL_BIT` set in their `Ty` values. + +use rustc_hash::FxHashMap; + +use crate::store::{Ty, TyData}; + +fn to_u32(value: usize) -> Option { + u32::try_from(value).ok() +} + +static ANY_TY_DATA: TyData = TyData::Any; + +/// Per-file local type store - temporary during analysis. +/// +/// Types interned here have `Ty` values with the `LOCAL_BIT` set. +/// After analysis, these types are merged into the global store. 
+#[derive(Debug, Clone)] +pub struct LocalTyStore { + /// Type data indexed by local Ty index. + data: Vec, + /// Reverse mapping for deduplication. + dedup: FxHashMap, +} + +impl LocalTyStore { + /// Create a new empty local store. + #[must_use] + pub fn new() -> Self { + Self { + data: Vec::new(), + dedup: FxHashMap::default(), + } + } + + /// Get type data by local index. + #[must_use] + pub fn get_data(&self, ty: Ty) -> &TyData { + if !ty.is_local() { + return &ANY_TY_DATA; + } + self.data + .get(ty.raw_index() as usize) + .unwrap_or(&ANY_TY_DATA) + } + + /// Get the number of local types. + #[must_use] + pub fn len(&self) -> usize { + self.data.len() + } + + /// Check if empty. + #[must_use] + pub fn is_empty(&self) -> bool { + self.data.is_empty() + } + + /// Intern a type into the local store. + /// + /// Returns an existing local type if the data is already interned locally. + /// Note: Does NOT check the global store - caller should check global first. + /// + pub fn intern(&mut self, data: TyData) -> Ty { + // Check if already interned locally + if let Some(&existing) = self.dedup.get(&data) { + return existing; + } + + // Intern new local type + let Some(raw_id) = to_u32(self.data.len()) else { + return Ty::ANY; + }; + let id = Ty::from_raw_local(raw_id); + self.data.push(data.clone()); + self.dedup.insert(data, id); + id + } + + /// Iterate over all local types with their indices. + pub fn iter(&self) -> impl Iterator { + self.data + .iter() + .enumerate() + .filter_map(|(i, data)| to_u32(i).map(|raw_id| (Ty::from_raw_local(raw_id), data))) + } + + /// Consume the store and return the type data vector. 
+ #[must_use] + pub fn into_data(self) -> Vec { + self.data + } +} + +impl Default for LocalTyStore { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_local_store_intern() { + let mut store = LocalTyStore::new(); + + let arr1 = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let arr2 = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + assert_eq!(arr1, arr2); + assert!(arr1.is_local()); + assert_eq!(arr1.raw_index(), 0); + } + + #[test] + fn test_local_store_get_data() { + let mut store = LocalTyStore::new(); + + let arr = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let data = store.get_data(arr); + + assert!(matches!(data, TyData::Array { elem, .. } if *elem == Ty::NUMBER)); + } + + #[test] + fn test_local_store_iter() { + let mut store = LocalTyStore::new(); + + store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + let types: Vec<_> = store.iter().collect(); + assert_eq!(types.len(), 2); + assert!(types[0].0.is_local()); + assert!(types[1].0.is_local()); + } +} diff --git a/crates/jrsonnet-lsp-types/src/mut_store.rs b/crates/jrsonnet-lsp-types/src/mut_store.rs new file mode 100644 index 00000000..c95da337 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/mut_store.rs @@ -0,0 +1,33 @@ +//! Mutable store for type analysis - combines global and local stores. +//! +//! During analysis, types are looked up in the global store first, +//! and new types are created in the local store. After analysis, +//! local types are merged into the global store. + +use std::sync::Arc; + +use crate::{global_store::GlobalTyStore, local_store::LocalTyStore}; + +/// Mutable store for type analysis - combines global and local stores. +/// +/// Provides a unified interface for type operations during file analysis. 
+/// - Lookups check global store first, then local store +/// - New types are created in the local store +/// - After analysis, call `into_local()` to get the local types for merging +/// +/// Uses `Arc` for easy sharing. +/// +/// Note: `MutStore` is intentionally not Clone. During analysis, a single +/// `MutStore` is used and passed by mutable reference. After analysis, +/// the local types are merged into the global store via `TySubst::merge`. +#[derive(Debug)] +pub struct MutStore { + /// Reference to the global store (read-only during analysis). + global: Arc, + /// Local store for types created during this analysis. + local: LocalTyStore, +} + +mod analysis; +mod core; +mod type_store_ops_impl; diff --git a/crates/jrsonnet-lsp-types/src/mut_store/analysis.rs b/crates/jrsonnet-lsp-types/src/mut_store/analysis.rs new file mode 100644 index 00000000..ef0f081d --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/mut_store/analysis.rs @@ -0,0 +1,286 @@ +use super::*; +use crate::store::{ReturnSpec, Ty, TyData}; + +impl MutStore { + // ========== Type queries ========== + + /// Check if type is indexable. + #[must_use] + pub fn is_indexable(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any + | TyData::String + | TyData::Char + | TyData::Array { .. } + | TyData::Tuple { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } => true, + TyData::Union(types) | TyData::Sum(types) => { + types.iter().all(|&t| self.is_indexable(t)) + } + TyData::TypeVar { constraints, .. } => constraints.must_be_indexable, + _ => false, + } + } + + /// Check if type supports field access. + #[must_use] + pub fn supports_field_access(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, + TyData::Union(types) | TyData::Sum(types) => { + types.iter().all(|&t| self.supports_field_access(t)) + } + TyData::TypeVar { constraints, .. 
} => constraints.must_support_fields, + _ => false, + } + } + + /// Check if type is callable. + #[must_use] + pub fn is_callable(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any | TyData::Function(_) => true, + TyData::Union(types) | TyData::Sum(types) => types.iter().all(|&t| self.is_callable(t)), + TyData::TypeVar { constraints, .. } => constraints.must_be_callable, + _ => false, + } + } + + /// Check if a type has any type variables. + #[must_use] + pub fn has_type_vars(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::TypeVar { .. } => true, + TyData::Array { elem, .. } => self.has_type_vars(elem), + TyData::Tuple { elems } => elems.iter().any(|&e| self.has_type_vars(e)), + TyData::Union(variants) | TyData::Sum(variants) => { + variants.iter().any(|&v| self.has_type_vars(v)) + } + TyData::Object(obj) => obj.fields.iter().any(|(_, fd)| self.has_type_vars(fd.ty)), + TyData::AttrsOf { value } => self.has_type_vars(value), + TyData::Function(func) => { + func.params.iter().any(|p| self.has_type_vars(p.ty)) + || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if self.has_type_vars(*ret)) + } + _ => false, + } + } + + /// Format a type for display. 
+ #[must_use] + pub fn display(&self, ty: Ty) -> String { + match self.get_data(ty) { + TyData::Any => "any".to_string(), + TyData::Never => "never".to_string(), + TyData::Null => "null".to_string(), + TyData::Bool => "boolean".to_string(), + TyData::True => "true".to_string(), + TyData::False => "false".to_string(), + TyData::Number => "number".to_string(), + TyData::BoundedNumber(bounds) => match (bounds.min_f64(), bounds.max_f64()) { + (None, None) => "number".to_string(), + (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { + if lo.fract() == 0.0 { + format!("{lo:.0}") + } else { + format!("{lo}") + } + } + (Some(lo), Some(hi)) => format!("number[{lo}..{hi}]"), + (Some(lo), None) => format!("number[{lo}..]"), + (None, Some(hi)) => format!("number[..{hi}]"), + }, + TyData::String => "string".to_string(), + TyData::Char => "char".to_string(), + TyData::LiteralString(s) => format!("\"{s}\""), + TyData::Array { elem, is_set } => { + let base = format!("array<{}>", self.display(elem)); + if is_set { + format!("set<{}>", self.display(elem)) + } else { + base + } + } + TyData::Tuple { elems } => { + let types: Vec<_> = elems.iter().map(|&t| self.display(t)).collect(); + format!("[{}]", types.join(", ")) + } + TyData::Object(obj) => { + if obj.fields.is_empty() && !obj.has_unknown { + "{}".to_string() + } else if obj.has_unknown { + "object".to_string() + } else { + let mut fields: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + fields.sort_unstable(); + format!("{{ {} }}", fields.join(", ")) + } + } + TyData::AttrsOf { value } => format!("object<{}>", self.display(value)), + TyData::Function(func) => { + let params: Vec<_> = func.params.iter().map(|p| p.name.as_str()).collect(); + format!("function({})", params.join(", ")) + } + TyData::Union(types) => { + let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect(); + parts.join(" | ") + } + TyData::Sum(types) => { + let parts: Vec<_> = types.iter().map(|&t| 
self.display(t)).collect(); + parts.join(" & ") + } + TyData::TypeVar { id, constraints } => { + let mut s = id.to_string(); + if !constraints.is_empty() { + let mut parts: Vec = Vec::new(); + if constraints.must_be_indexable { + parts.push("indexable".to_string()); + } + if constraints.must_support_fields { + parts.push("object-like".to_string()); + } + if constraints.must_be_callable { + parts.push("callable".to_string()); + } + if let Some(bound) = constraints.upper_bound { + parts.push(format!("<: {}", self.display(bound))); + } + if !parts.is_empty() { + s.push_str(" where "); + s.push_str(&parts.join(", ")); + } + } + s + } + } + } + + /// Alias for `get_data()` to ease migration from `TyStore`. + /// + /// `TyStore::get()` returns `TyRef` which derefs to `TyData`. This returns + /// `TyData` directly. Callers using `*store.get(ty)` should use `store.get(ty)`. + #[inline] + #[must_use] + pub fn get(&self, ty: Ty) -> TyData { + self.get_data(ty) + } + + // ========== Type operations ========== + + /// Narrow a type by intersecting with a constraint. + pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { + crate::operations::ty_and(ty, constraint, self) + } + + /// Narrow a type to one with an exact length. + /// + /// - Arrays become tuples with that length + /// - Strings with length 1 become Char + /// - Tuples must have matching length + pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { + crate::operations::ty_with_len(ty, len, self) + } + + /// Narrow a type to one with at least a minimum length. + pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { + crate::operations::ty_with_min_len(ty, min, self) + } + + /// Widen a type by removing a constraint. 
+ pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { + crate::operations::ty_minus(base, remove, self) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::*; + use crate::global_store::GlobalTyStore; + + #[test] + fn test_mut_store_intern_global_first() { + let global = Arc::new(GlobalTyStore::new()); + + // Pre-intern a type in global + let arr_global = global.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + // MutStore should find it in global + let mut store = MutStore::new(Arc::clone(&global)); + let arr = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + assert_eq!(arr, arr_global); + assert!(arr.is_global()); + assert!(store.local.is_empty()); + } + + #[test] + fn test_mut_store_intern_local() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + // Intern a new type not in global + let arr = store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + assert!(arr.is_local()); + assert_eq!(store.local.len(), 1); + } + + #[test] + fn test_mut_store_get_data() { + let global = Arc::new(GlobalTyStore::new()); + let arr_global = global.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + let mut store = MutStore::new(Arc::clone(&global)); + let arr_local = store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + // Should get data from both stores + assert!( + matches!(store.get_data(arr_global), TyData::Array { elem, .. } if elem == Ty::NUMBER) + ); + assert!( + matches!(store.get_data(arr_local), TyData::Array { elem, .. 
} if elem == Ty::STRING) + ); + } + + #[test] + fn test_mut_store_union() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert!(!union.is_well_known()); + + // Union with ANY is ANY + let with_any = store.union(vec![Ty::NUMBER, Ty::ANY]); + assert_eq!(with_any, Ty::ANY); + } + + #[test] + fn test_mut_store_display() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + assert_eq!(store.display(Ty::NUMBER), "number"); + + let arr = store.array(Ty::STRING); + assert_eq!(store.display(arr), "array"); + } +} diff --git a/crates/jrsonnet-lsp-types/src/mut_store/core.rs b/crates/jrsonnet-lsp-types/src/mut_store/core.rs new file mode 100644 index 00000000..edd21e36 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/mut_store/core.rs @@ -0,0 +1,245 @@ +use std::sync::Arc; + +use super::*; +use crate::{ + global_store::GlobalTyStore, + local_store::LocalTyStore, + store::{ + FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, + TyVarId, + }, +}; + +impl MutStore { + /// Create a new mutable store wrapping a global store. + pub fn new(global: Arc) -> Self { + Self { + global, + local: LocalTyStore::new(), + } + } + + /// Create a new mutable store from a global store reference. + /// + /// Convenience constructor that clones the Arc. + pub fn from_ref(global: &Arc) -> Self { + Self::new(Arc::clone(global)) + } + + /// Get type data for any Ty (global or local). + #[must_use] + pub fn get_data(&self, ty: Ty) -> TyData { + if ty.is_local() { + self.local.get_data(ty).clone() + } else { + self.global.get_data(ty) + } + } + + /// Intern a type, checking global first, then local. 
+ /// + /// - Well-known types return immediately + /// - Types already in global store return the global Ty + /// - Types already in local store return the local Ty + /// - New types are created in local store + pub fn intern(&mut self, data: TyData) -> Ty { + // Fast path for well-known types + if let Some(ty) = Ty::well_known_for_data(&data) { + return ty; + } + + // Check global store first (read-only) + { + let global_inner = self.global.read(); + if let Some(&existing) = global_inner.dedup.get(&data) { + return existing; + } + } + + // Check/create in local store + self.local.intern(data) + } + + /// Consume and return the local store for merging. + #[must_use] + pub fn into_local(self) -> LocalTyStore { + self.local + } + + /// Get reference to the global store. + #[must_use] + pub fn global(&self) -> &GlobalTyStore { + &self.global + } + + /// Get the Arc to the global store. + #[must_use] + pub fn global_arc(&self) -> &Arc { + &self.global + } + + /// Get reference to the local store. + #[must_use] + pub fn local(&self) -> &LocalTyStore { + &self.local + } + + // ========== Type constructors ========== + + /// Create an array type. + pub fn array(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { + elem, + is_set: false, + }) + } + + /// Create a set type (array with sorted, unique elements). + pub fn array_set(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { elem, is_set: true }) + } + + /// Create a tuple type. + pub fn tuple(&mut self, elems: Vec) -> Ty { + if elems.is_empty() { + return self.intern(TyData::Tuple { elems: vec![] }); + } + self.intern(TyData::Tuple { elems }) + } + + /// Create an object type. + pub fn object(&mut self, data: ObjectData) -> Ty { + self.intern(TyData::Object(data)) + } + + /// Create an open object (unknown fields). + pub fn object_any(&mut self) -> Ty { + self.object(ObjectData::open()) + } + + /// Create a generic function type (accepts any args, returns any). 
+ pub fn function_any(&mut self) -> Ty { + self.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }) + } + + /// Create an `AttrsOf` type (object with uniform value type). + pub fn attrs_of(&mut self, value: Ty) -> Ty { + self.intern(TyData::AttrsOf { value }) + } + + /// Create a function type. + pub fn function(&mut self, data: FunctionData) -> Ty { + self.intern(TyData::Function(data)) + } + + /// Create a function with simple params and fixed return. + pub fn function_simple(&mut self, param_names: Vec<&str>, return_ty: Ty) -> Ty { + let params = param_names + .into_iter() + .map(|name| ParamInterned { + name: name.to_string(), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + self.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }) + } + + /// Create a union type. + pub fn union(&mut self, mut types: Vec) -> Ty { + // Simplification rules + match types.as_slice() { + [] => return Ty::NEVER, + [only] => return *only, + _ => {} + } + + // Flatten nested unions and remove duplicates + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::ANY { + return Ty::ANY; // Any absorbs everything + } + if ty == Ty::NEVER { + continue; // Never is identity for union + } + if let TyData::Union(inner) = self.get_data(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + // Sort for canonical form + flattened.sort_by_key(|t| t.id()); + flattened.dedup(); + + match flattened.as_slice() { + [] => Ty::NEVER, + [only] => *only, + _ => self.intern(TyData::Union(flattened)), + } + } + + /// Create a sum (intersection) type. 
+ pub fn sum(&mut self, mut types: Vec) -> Ty { + match types.as_slice() { + [] => return Ty::ANY, + [only] => return *only, + _ => {} + } + + // Flatten and simplify + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::NEVER { + return Ty::NEVER; // Never absorbs everything in intersection + } + if ty == Ty::ANY { + continue; // Any is identity for intersection + } + if let TyData::Sum(inner) = self.get_data(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + flattened.sort_by_key(|t| t.id()); + flattened.dedup(); + + match flattened.as_slice() { + [] => Ty::ANY, + [only] => *only, + _ => self.intern(TyData::Sum(flattened)), + } + } + + /// Create a bounded number type. + pub fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + self.intern(TyData::BoundedNumber(bounds)) + } + + /// Create a literal string type. + pub fn literal_string(&mut self, s: String) -> Ty { + self.intern(TyData::LiteralString(s)) + } + + /// Create a type variable. + pub fn type_var(&mut self, id: TyVarId, constraints: TyConstraints) -> Ty { + self.intern(TyData::TypeVar { id, constraints }) + } + + /// Create a fresh type variable with no constraints. 
+ pub fn fresh_var(&mut self) -> Ty { + self.type_var(TyVarId::fresh(), TyConstraints::none()) + } +} diff --git a/crates/jrsonnet-lsp-types/src/mut_store/type_store_ops_impl.rs b/crates/jrsonnet-lsp-types/src/mut_store/type_store_ops_impl.rs new file mode 100644 index 00000000..f5073c39 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/mut_store/type_store_ops_impl.rs @@ -0,0 +1,52 @@ +use super::*; +use crate::store::{FunctionData, NumBounds, ObjectData, Ty, TyData, TypeStoreOps}; + +impl TypeStoreOps for MutStore { + fn get_data(&self, ty: Ty) -> TyData { + MutStore::get_data(self, ty) + } + + fn display(&self, ty: Ty) -> String { + MutStore::display(self, ty) + } + + fn array(&mut self, elem: Ty) -> Ty { + MutStore::array(self, elem) + } + + fn array_set(&mut self, elem: Ty) -> Ty { + MutStore::array_set(self, elem) + } + + fn tuple(&mut self, elems: Vec) -> Ty { + MutStore::tuple(self, elems) + } + + fn object(&mut self, data: ObjectData) -> Ty { + MutStore::object(self, data) + } + + fn attrs_of(&mut self, value: Ty) -> Ty { + MutStore::attrs_of(self, value) + } + + fn function(&mut self, data: FunctionData) -> Ty { + MutStore::function(self, data) + } + + fn union(&mut self, types: Vec) -> Ty { + MutStore::union(self, types) + } + + fn sum(&mut self, types: Vec) -> Ty { + MutStore::sum(self, types) + } + + fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + MutStore::bounded_number(self, bounds) + } + + fn literal_string(&mut self, s: String) -> Ty { + MutStore::literal_string(self, s) + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations.rs b/crates/jrsonnet-lsp-types/src/operations.rs new file mode 100644 index 00000000..acbf0963 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations.rs @@ -0,0 +1,25 @@ +//! Type operations for checking and combining types. +//! +//! This module provides operations that determine the result types of +//! Jsonnet operations like binary operators, unary operators, and type +//! checking predicates. +//! 
+//! # Type Logic Operations +//! +//! The module also provides core type logic operations for type narrowing: +//! +//! - [`ty_and`][]: Intersection of types (narrows to what satisfies both) +//! - [`ty_minus`][]: Exclusion (removes a type from a union) +//! - [`ty_with_len`][]: Constrains to a specific length +//! - [`ty_with_min_len`][]: Constrains to a minimum length +//! +//! These operations distribute over unions, following the rule: +//! `(A | B) & C = (A & C) | (B & C)` + +mod logic; +mod operators; + +pub use logic::{ty_and, ty_minus, ty_with_field, ty_with_len, ty_with_min_len}; +pub use operators::{ + array_concat_ty, binary_op_result_ty, unary_op_result_ty, BinaryOpTypeError, UnaryOpTypeError, +}; diff --git a/crates/jrsonnet-lsp-types/src/operations/logic.rs b/crates/jrsonnet-lsp-types/src/operations/logic.rs new file mode 100644 index 00000000..b96645c1 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic.rs @@ -0,0 +1,11 @@ +//! Core type logic and narrowing operations. + +mod exclusion; +mod field; +mod intersection; +mod length; + +pub use exclusion::ty_minus; +pub use field::ty_with_field; +pub use intersection::ty_and; +pub use length::{ty_with_len, ty_with_min_len}; diff --git a/crates/jrsonnet-lsp-types/src/operations/logic/exclusion.rs b/crates/jrsonnet-lsp-types/src/operations/logic/exclusion.rs new file mode 100644 index 00000000..df0b0e51 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic/exclusion.rs @@ -0,0 +1,116 @@ +use crate::store::{Ty, TyData, TypeStoreOps}; + +/// Compute the exclusion of one type from another. +/// +/// Returns the type with the constraint removed (difference/minus). +/// This removes values that match `remove` from `base`. 
+/// +/// # Examples +/// +/// - `ty_minus(Number | String, Number)` → `String` +/// - `ty_minus(Bool, True)` → `False` +/// - `ty_minus(Any, Number)` → `Any` (Any is too general) +/// - `ty_minus(Number, Number)` → `Never` +/// +/// # Distribution over Unions +/// +/// This operation distributes over unions: +/// `(A | B) - C = (A - C) | (B - C)` +pub fn ty_minus(base: Ty, remove: Ty, store: &mut S) -> Ty { + // Fast paths + if base == Ty::NEVER { + return Ty::NEVER; + } + if remove == Ty::NEVER { + return base; + } + if remove == Ty::ANY { + return Ty::NEVER; + } + if base == Ty::ANY { + // Can't remove anything meaningful from Any + return Ty::ANY; + } + if base == remove { + return Ty::NEVER; + } + + let base_data = store.get_data(base); + + // Handle unions: distribute (A | B) - C = (A - C) | (B - C) + if let TyData::Union(types) = base_data { + let remaining: Vec = types + .iter() + .map(|&t| ty_minus(t, remove, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(remaining); + } + + // Special case: Bool minus True/False + let base_data = store.get_data(base); + let remove_data = store.get_data(remove); + match (&base_data, &remove_data) { + (TyData::Bool, TyData::True) => return Ty::FALSE, + (TyData::Bool, TyData::False) => return Ty::TRUE, + (TyData::True | TyData::False, TyData::Bool) => return Ty::NEVER, + _ => {} + } + + // For non-union types, if they don't match the remove type, return unchanged + base +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::TyStore; + + mod ty_minus_tests { + use super::*; + + #[test] + fn test_same_type_gives_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NEVER); + } + + #[test] + fn test_different_type_unchanged() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::STRING, &mut store), Ty::NUMBER); + } + + #[test] + fn test_any_stays_any() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::ANY, 
Ty::NUMBER, &mut store), Ty::ANY); + } + + #[test] + fn test_minus_any_gives_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::ANY, &mut store), Ty::NEVER); + } + + #[test] + fn test_union_removes_matching() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_minus(union, Ty::NUMBER, &mut store), Ty::STRING); + } + + #[test] + fn test_bool_minus_true_gives_false() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::BOOL, Ty::TRUE, &mut store), Ty::FALSE); + assert_eq!(ty_minus(Ty::BOOL, Ty::FALSE, &mut store), Ty::TRUE); + } + + #[test] + fn test_never_stays_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations/logic/field.rs b/crates/jrsonnet-lsp-types/src/operations/logic/field.rs new file mode 100644 index 00000000..6f9501e6 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic/field.rs @@ -0,0 +1,153 @@ +use super::intersection::ty_and; +use crate::store::{FieldDefInterned, FieldVis, ObjectData, Ty, TyData, TypeStoreOps}; + +/// Add a required field to an object type. +/// +/// Returns a new object type with the specified field added. +/// If the field already exists, its type is narrowed with the new type. 
+pub fn ty_with_field(ty: Ty, field: &str, field_ty: Ty, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => { + // Create an open object with this field + store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }) + } + + TyData::Object(mut obj) => { + if let Some((_, existing)) = obj.fields.iter_mut().find(|(n, _)| n == field) { + let narrowed = ty_and(existing.ty, field_ty, store); + *existing = FieldDefInterned { + ty: narrowed, + required: true, + visibility: existing.visibility, + }; + } else { + obj.fields.push(( + field.to_string(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )); + } + store.object(obj) + } + + TyData::AttrsOf { value } => { + // AttrsOf with a specific field becomes object with that field + let narrowed = ty_and(value, field_ty, store); + store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: narrowed, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }) + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_field(t, field, field_ty, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + // Non-object types can't have fields + _ => Ty::NEVER, + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::TyStore; + + mod ty_with_field_tests { + use super::*; + + #[test] + fn test_any_to_object() { + let mut store = TyStore::new(); + let result = ty_with_field(Ty::ANY, "foo", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + assert_eq!(obj.fields, vec![("foo".to_string(), FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + })]); + assert!(obj.has_unknown); + }); + } + + 
#[test] + fn test_object_adds_field() { + let mut store = TyStore::new(); + let obj = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_with_field(obj, "b", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + field_names.sort_unstable(); + assert_eq!(field_names, vec!["a", "b"]); + }); + } + + #[test] + fn test_object_narrows_existing_field() { + let mut store = TyStore::new(); + let obj = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::ANY, + required: false, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_with_field(obj, "a", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); + assert!(obj.fields[0].1.required); + }); + } + + #[test] + fn test_number_never() { + let mut store = TyStore::new(); + assert_eq!( + ty_with_field(Ty::NUMBER, "foo", Ty::STRING, &mut store), + Ty::NEVER + ); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations/logic/intersection.rs b/crates/jrsonnet-lsp-types/src/operations/logic/intersection.rs new file mode 100644 index 00000000..5269c6d0 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic/intersection.rs @@ -0,0 +1,431 @@ +use crate::store::{FieldDefInterned, ObjectData, Ty, TyData, TypeStoreOps}; + +/// Compute the intersection of two types (type narrowing). +/// +/// Returns the most specific type that satisfies both constraints. +/// This is the logical AND of types - values must satisfy both. 
+/// +/// # Examples +/// +/// - `ty_and(Any, Number)` → `Number` +/// - `ty_and(Number, String)` → `Never` (no value is both) +/// - `ty_and(Bool, True)` → `True` +/// - `ty_and(Number | String, Number)` → `Number` +/// +/// # Distribution over Unions +/// +/// This operation distributes over unions: +/// `(A | B) & C = (A & C) | (B & C)` +pub fn ty_and(lhs: Ty, rhs: Ty, store: &mut S) -> Ty { + // Fast paths for special types + if lhs == Ty::NEVER || rhs == Ty::NEVER { + return Ty::NEVER; + } + if lhs == Ty::ANY { + return rhs; + } + if rhs == Ty::ANY { + return lhs; + } + if lhs == rhs { + return lhs; + } + + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + // Handle unions: distribute (A | B) & C = (A & C) | (B & C) + if let TyData::Union(types) = lhs_data { + let narrowed: Vec = types + .iter() + .map(|&t| ty_and(t, rhs, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(narrowed); + } + if let TyData::Union(types) = rhs_data { + let narrowed: Vec = types + .iter() + .map(|&t| ty_and(lhs, t, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(narrowed); + } + + // Refresh data after potential recursion + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + // Handle literal/subtype relationships + match (&lhs_data, &rhs_data) { + // Bool and its literals + (TyData::Bool, TyData::True) | (TyData::True, TyData::Bool) => return Ty::TRUE, + (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, + + // String and Char + (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, + + // String and LiteralString + (TyData::String, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::String) => { + return store.literal_string(s.clone()); + } + + // LiteralString with same value + (TyData::LiteralString(s1), TyData::LiteralString(s2)) => { + if s1 == s2 { + return store.literal_string(s1.clone()); + } + return 
Ty::NEVER; + } + + // Char and LiteralString of length 1 + (TyData::Char, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::Char) => { + if s.chars().count() == 1 { + return store.literal_string(s.clone()); + } + return Ty::NEVER; + } + + _ => {} + } + + // Handle arrays (preserve is_set if both are sets) + if let ( + TyData::Array { + elem: e1, + is_set: s1, + }, + TyData::Array { + elem: e2, + is_set: s2, + }, + ) = (&lhs_data, &rhs_data) + { + let elem = ty_and(*e1, *e2, store); + if elem == Ty::NEVER { + return Ty::NEVER; + } + // Result is a set only if both inputs are sets + if *s1 && *s2 { + return store.array_set(elem); + } + return store.array(elem); + } + + // Handle tuples with arrays + if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) = (&lhs_data, &rhs_data) + { + let narrowed: Vec = elems.iter().map(|&e| ty_and(e, *arr_elem, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + if let (TyData::Array { elem: arr_elem, .. 
}, TyData::Tuple { elems }) = (&lhs_data, &rhs_data) + { + let narrowed: Vec = elems.iter().map(|&e| ty_and(*arr_elem, e, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + + // Handle tuples with tuples + if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = (&lhs_data, &rhs_data) { + if e1.len() != e2.len() { + return Ty::NEVER; + } + let narrowed: Vec = e1 + .iter() + .zip(e2.iter()) + .map(|(&a, &b)| ty_and(a, b, store)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + + // Handle objects + if let (TyData::Object(obj1), TyData::Object(obj2)) = (&lhs_data, &rhs_data) { + let mut fields = obj1.fields.clone(); + for (name, def2) in &obj2.fields { + if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { + let narrowed_ty = ty_and(def1.ty, def2.ty, store); + *def1 = FieldDefInterned { + ty: narrowed_ty, + required: def1.required || def2.required, + visibility: def1.visibility, + }; + } else { + fields.push((name.clone(), def2.clone())); + } + } + let has_unknown = obj1.has_unknown && obj2.has_unknown; + return store.object(ObjectData { + fields, + has_unknown, + }); + } + + // Handle AttrsOf + if let (TyData::AttrsOf { value: v1 }, TyData::AttrsOf { value: v2 }) = (&lhs_data, &rhs_data) { + let elem = ty_and(*v1, *v2, store); + if elem == Ty::NEVER { + return Ty::NEVER; + } + return store.attrs_of(elem); + } + + // Handle object + AttrsOf + if let (TyData::Object(obj), TyData::AttrsOf { value }) = (&lhs_data, &rhs_data) { + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let narrowed_ty = ty_and(field.ty, *value, store); + ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + return store.object(ObjectData { + fields, + has_unknown: obj.has_unknown, + }); + } + if let (TyData::AttrsOf { value 
}, TyData::Object(obj)) = (&lhs_data, &rhs_data) { + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let narrowed_ty = ty_and(*value, field.ty, store); + ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + return store.object(ObjectData { + fields, + has_unknown: obj.has_unknown, + }); + } + + // Handle BoundedNumber + if let (TyData::Number, TyData::BoundedNumber(bounds)) + | (TyData::BoundedNumber(bounds), TyData::Number) = (&lhs_data, &rhs_data) + { + return store.bounded_number(*bounds); + } + if let (TyData::BoundedNumber(b1), TyData::BoundedNumber(b2)) = (&lhs_data, &rhs_data) { + // Intersection of bounds: take stricter bounds + let min = match (b1.min_f64(), b2.min_f64()) { + (Some(a), Some(b)) => Some(a.max(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + let max = match (b1.max_f64(), b2.max_f64()) { + (Some(a), Some(b)) => Some(a.min(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + // Check for empty range + if let (Some(lo), Some(hi)) = (min, max) { + if lo > hi { + return Ty::NEVER; + } + } + return store.bounded_number(crate::store::NumBounds { + min: min.map(f64::to_bits), + max: max.map(f64::to_bits), + }); + } + + // Handle Sum (intersection) types + if let TyData::Sum(types) = lhs_data { + let narrowed: Vec = types.iter().map(|&t| ty_and(t, rhs, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.sum(narrowed); + } + if let TyData::Sum(types) = rhs_data { + let narrowed: Vec = types.iter().map(|&t| ty_and(lhs, t, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.sum(narrowed); + } + + // Handle TypeVar - keep it, may be resolved later + if matches!(lhs_data, TyData::TypeVar { .. }) || matches!(rhs_data, TyData::TypeVar { .. 
}) { + return store.sum(vec![lhs, rhs]); + } + + // Different incompatible concrete types have no intersection + Ty::NEVER +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::{store::FieldVis, TyStore}; + + mod ty_and_tests { + use super::*; + + #[test] + fn test_any_narrows_to_constraint() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::ANY, Ty::NUMBER, &mut store), Ty::NUMBER); + assert_eq!(ty_and(Ty::NUMBER, Ty::ANY, &mut store), Ty::NUMBER); + } + + #[test] + fn test_never_always_never() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); + assert_eq!(ty_and(Ty::NUMBER, Ty::NEVER, &mut store), Ty::NEVER); + } + + #[test] + fn test_same_type_returns_same() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NUMBER); + assert_eq!(ty_and(Ty::STRING, Ty::STRING, &mut store), Ty::STRING); + } + + #[test] + fn test_incompatible_types_never() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NUMBER, Ty::STRING, &mut store), Ty::NEVER); + assert_eq!(ty_and(Ty::BOOL, Ty::NUMBER, &mut store), Ty::NEVER); + } + + #[test] + fn test_bool_narrows_to_literal() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::BOOL, Ty::TRUE, &mut store), Ty::TRUE); + assert_eq!(ty_and(Ty::BOOL, Ty::FALSE, &mut store), Ty::FALSE); + assert_eq!(ty_and(Ty::TRUE, Ty::BOOL, &mut store), Ty::TRUE); + assert_eq!(ty_and(Ty::FALSE, Ty::BOOL, &mut store), Ty::FALSE); + } + + #[test] + fn test_string_narrows_to_char() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::STRING, Ty::CHAR, &mut store), Ty::CHAR); + assert_eq!(ty_and(Ty::CHAR, Ty::STRING, &mut store), Ty::CHAR); + } + + #[test] + fn test_union_distributes() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // (Number | String) & Number = Number + assert_eq!(ty_and(union, Ty::NUMBER, &mut store), Ty::NUMBER); + // 
Number & (Number | String) = Number + assert_eq!(ty_and(Ty::NUMBER, union, &mut store), Ty::NUMBER); + } + + #[test] + fn test_union_with_incompatible_gives_partial() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // (Number | String) & Bool = Never (both incompatible) + assert_eq!(ty_and(union, Ty::BOOL, &mut store), Ty::NEVER); + } + + #[test] + fn test_array_intersection() { + let mut store = TyStore::new(); + let arr_num = store.array(Ty::NUMBER); + let arr_any = store.array(Ty::ANY); + // Array & Array = Array + let result = ty_and(arr_num, arr_any, &mut store); + assert!( + matches!(store.get_data(result), TyData::Array { elem, .. } if elem == Ty::NUMBER) + ); + } + + #[test] + fn test_tuple_intersection_same_length() { + let mut store = TyStore::new(); + let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::ANY]); + let tuple2 = store.tuple(vec![Ty::ANY, Ty::STRING]); + let result = ty_and(tuple1, tuple2, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); + }); + } + + #[test] + fn test_tuple_intersection_different_length_never() { + let mut store = TyStore::new(); + let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + let tuple2 = store.tuple(vec![Ty::NUMBER]); + assert_eq!(ty_and(tuple1, tuple2, &mut store), Ty::NEVER); + } + + #[test] + fn test_object_intersection_merges_fields() { + let mut store = TyStore::new(); + let obj1 = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let obj2 = store.object(ObjectData { + fields: vec![( + "b".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_and(obj1, obj2, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + 
// Should have both fields "a" and "b" + let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + field_names.sort_unstable(); + assert_eq!(field_names, vec!["a", "b"]); + }); + } + + #[test] + fn test_bounded_number_intersection() { + let mut store = TyStore::new(); + let bounded1 = store.bounded_number(crate::store::NumBounds::at_least(0.0)); + let bounded2 = store.bounded_number(crate::store::NumBounds::between(-10.0, 10.0)); + let result = ty_and(bounded1, bounded2, &mut store); + // Should get [0..10] + assert_matches!(store.get_data(result), TyData::BoundedNumber(bounds) => { + assert_eq!(bounds.min_f64(), Some(0.0)); + assert_eq!(bounds.max_f64(), Some(10.0)); + }); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations/logic/length.rs b/crates/jrsonnet-lsp-types/src/operations/logic/length.rs new file mode 100644 index 00000000..12fc8021 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic/length.rs @@ -0,0 +1,399 @@ +use crate::store::{ObjectData, Ty, TyData, TypeStoreOps}; + +/// Narrow a type to one with a specific length. +/// +/// This is useful for narrowing based on `std.length(x) == n` conditions. +/// +/// # Behavior +/// +/// - Arrays become tuples with `n` elements of the same element type +/// - Tuples must have exactly `n` elements (otherwise `Never`) +/// - Strings with length 1 become `Char` +/// - Objects must have exactly `n` fields (if closed) or at least `n` (if open) +/// - Primitives like Number/Bool return `Never` (they don't have length) +/// +/// # Examples +/// +/// - `ty_with_len(Array, 3)` → `[Number, Number, Number]` +/// - `ty_with_len(String, 1)` → `Char` +/// - `ty_with_len([Number, String], 2)` → `[Number, String]` +/// - `ty_with_len([Number, String], 3)` → `Never` +pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => Ty::ANY, + + TyData::Array { elem, .. 
} => { + let elems = vec![elem; len]; + store.tuple(elems) + } + + TyData::Tuple { elems } => { + if elems.len() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Object(obj) => { + match (obj.fields.len().cmp(&len), obj.has_unknown) { + // Exactly right number of fields + (std::cmp::Ordering::Equal, false) => ty, + // Open object with fewer fields - close it at this length + (std::cmp::Ordering::Less | std::cmp::Ordering::Equal, true) => { + if obj.fields.len() == len { + store.object(ObjectData { + fields: obj.fields, + has_unknown: false, + }) + } else { + ty // Can have unknown fields to reach the length + } + } + // Too few fields in closed object, or too many fields + (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, + } + } + + TyData::Function(func) => { + if func.variadic && func.params.is_empty() { + let params = (0..len) + .map(|idx| crate::store::ParamInterned { + name: format!("arg{idx}"), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + store.function(crate::store::FunctionData { + params, + return_spec: crate::store::ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }) + } else if func.required_count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::AttrsOf { .. } | TyData::TypeVar { .. 
} => ty, + + TyData::String => { + if len == 1 { + Ty::CHAR + } else { + ty // String can be any length + } + } + + TyData::LiteralString(s) => { + if s.chars().count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if len == 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => Ty::NEVER, + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_len(t, len, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types.iter().map(|&t| ty_with_len(t, len, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + store.sum(narrowed) + } + } + } +} + +/// Narrow a type to one with at least a minimum length. +/// +/// This is useful for narrowing based on `std.length(x) >= n` conditions. +/// +/// # Behavior +/// +/// - Arrays stay arrays (can have any length) +/// - Tuples must have at least `n` elements +/// - Strings stay strings (can have any length) +/// - Literal strings are checked exactly against `n` +/// - Char requires `min <= 1` +/// - Objects with unknown fields stay as-is +/// +/// # Examples +/// +/// - `ty_with_min_len(Array, 3)` → `Array` +/// - `ty_with_min_len([Number, String], 1)` → `[Number, String]` +/// - `ty_with_min_len([Number], 2)` → `Never` +/// - `ty_with_min_len("ok", 3)` → `Never` +/// - `ty_with_min_len(Char, 2)` → `Never` +pub fn ty_with_min_len(ty: Ty, min: usize, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => Ty::ANY, + TyData::Never => Ty::NEVER, + + TyData::Array { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } + | TyData::String + | TyData::Function(_) + | TyData::TypeVar { .. 
} => ty, + + TyData::LiteralString(s) => { + if s.chars().count() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Tuple { elems } => { + if elems.len() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if min <= 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => { + if min == 0 { + ty // Everything has "length >= 0" + } else { + Ty::NEVER + } + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_min_len(t, min, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_min_len(t, min, store)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + store.sum(narrowed) + } + } + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::TyStore; + + mod ty_with_len_tests { + use super::*; + + #[test] + fn test_array_to_tuple() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + let result = ty_with_len(arr, 3, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + }); + } + + #[test] + fn test_tuple_matching_length() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_with_len(tuple, 2, &mut store), tuple); + } + + #[test] + fn test_tuple_wrong_length_never() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_with_len(tuple, 3, &mut store), Ty::NEVER); + } + + #[test] + fn test_string_len_1_to_char() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::STRING, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_len_1_ok() { + let mut store = TyStore::new(); + 
assert_eq!(ty_with_len(Ty::CHAR, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_len_not_1_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::CHAR, 0, &mut store), Ty::NEVER); + assert_eq!(ty_with_len(Ty::CHAR, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_number_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::NUMBER, 5, &mut store), Ty::NEVER); + } + + #[test] + fn test_literal_string_matching_len() { + let mut store = TyStore::new(); + let lit = store.literal_string("hello".to_string()); + assert_eq!(ty_with_len(lit, 5, &mut store), lit); + } + + #[test] + fn test_literal_string_wrong_len_never() { + let mut store = TyStore::new(); + let lit = store.literal_string("hello".to_string()); + assert_eq!(ty_with_len(lit, 3, &mut store), Ty::NEVER); + } + + #[test] + fn test_union_filters() { + let mut store = TyStore::new(); + let tuple2 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); + let tuple3 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + let union = store.union(vec![tuple2, tuple3]); + assert_eq!(ty_with_len(union, 2, &mut store), tuple2); + } + + #[test] + fn test_function_matches_required_arity() { + let mut store = TyStore::new(); + let func = store.function(crate::store::FunctionData { + params: vec![ + crate::store::ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }, + crate::store::ParamInterned { + name: "y".to_string(), + ty: Ty::ANY, + has_default: true, + }, + ], + return_spec: crate::store::ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + }); + assert_eq!(ty_with_len(func, 1, &mut store), func); + assert_eq!(ty_with_len(func, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_function_any_narrows_to_exact_arity() { + let mut store = TyStore::new(); + let func = store.function_any(); + let narrowed = ty_with_len(func, 2, &mut store); + assert_matches!(store.get_data(narrowed), TyData::Function(func_data) => { + assert!(!func_data.variadic); + 
assert_eq!(func_data.params.len(), 2); + assert_eq!(func_data.params[0].name, "arg0"); + assert_eq!(func_data.params[1].name, "arg1"); + assert!(func_data.params.iter().all(|p| p.ty == Ty::ANY)); + assert!(func_data.params.iter().all(|p| !p.has_default)); + assert_eq!(func_data.return_spec, crate::store::ReturnSpec::Fixed(Ty::ANY)); + }); + } + } + + mod ty_with_min_len_tests { + use super::*; + + #[test] + fn test_array_unchanged() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + assert_eq!(ty_with_min_len(arr, 5, &mut store), arr); + } + + #[test] + fn test_tuple_satisfies_min() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + assert_eq!(ty_with_min_len(tuple, 2, &mut store), tuple); + } + + #[test] + fn test_tuple_too_short_never() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER]); + assert_eq!(ty_with_min_len(tuple, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_char_min_1_ok() { + let mut store = TyStore::new(); + assert_eq!(ty_with_min_len(Ty::CHAR, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_min_2_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_min_len(Ty::CHAR, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_literal_string_meets_min() { + let mut store = TyStore::new(); + let literal = store.literal_string("hello".to_string()); + assert_eq!(ty_with_min_len(literal, 3, &mut store), literal); + } + + #[test] + fn test_literal_string_too_short() { + let mut store = TyStore::new(); + let literal = store.literal_string("hi".to_string()); + assert_eq!(ty_with_min_len(literal, 3, &mut store), Ty::NEVER); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations/operators.rs b/crates/jrsonnet-lsp-types/src/operations/operators.rs new file mode 100644 index 00000000..376108a1 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/operators.rs @@ -0,0 +1,502 @@ +//! 
Type checking and result-type computation for Jsonnet operators. + +use jrsonnet_rowan_parser::nodes::{BinaryOperatorKind, UnaryOperatorKind}; +use thiserror::Error; + +use crate::store::{FieldDefInterned, ObjectData, Ty, TyData, TypeStoreOps}; + +/// Structured reason for a binary operator type mismatch. +#[derive(Debug, Error, Clone, Copy, PartialEq, Eq)] +pub enum BinaryOpTypeError { + /// Arithmetic operators require `(number, number)`. + #[error("operator requires (number, number)")] + RequiresNumberPair, + /// `+` requires compatible operand families. + #[error("operator `+` requires compatible operand families")] + InvalidPlusOperands, + /// Bitwise operators require `(number, number)`. + #[error("bitwise operator requires (number, number)")] + RequiresBitwiseNumberPair, + /// `in` requires `(string, object)`. + #[error("operator `in` requires (string, object)")] + RequiresStringAndObject, +} + +/// Structured reason for a unary operator type mismatch. +#[derive(Debug, Error, Clone, Copy, PartialEq, Eq)] +pub enum UnaryOpTypeError { + /// `!` requires a boolean. + #[error("operator `!` requires boolean")] + NotRequiresBoolean, + /// `-` requires a number. + #[error("operator `-` requires number")] + MinusRequiresNumber, + /// `~` requires a number. + #[error("operator `~` requires number")] + BitNotRequiresNumber, +} + +pub fn binary_op_result_ty( + op: BinaryOperatorKind, + lhs: Ty, + rhs: Ty, + store: &mut S, +) -> Result { + // Any, Never short-circuit + if lhs.is_any() || rhs.is_any() { + return Ok(Ty::ANY); + } + if lhs.is_never() { + return Ok(Ty::NEVER); + } + if rhs.is_never() { + return Ok(Ty::NEVER); + } + + // Check for TypeVar + if let TyData::TypeVar { .. } = store.get_data(lhs) { + return Ok(Ty::ANY); + } + if let TyData::TypeVar { .. 
} = store.get_data(rhs) { + return Ok(Ty::ANY); + } + + // Handle union types by checking all combinations + if let TyData::Union(lhs_types) = store.get_data(lhs) { + let mut results = Vec::new(); + for lt in lhs_types { + match binary_op_result_ty(op, lt, rhs, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + if let TyData::Union(rhs_types) = store.get_data(rhs) { + let mut results = Vec::new(); + for rt in rhs_types { + match binary_op_result_ty(op, lhs, rt, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + // Handle Sum (intersection) types - all variants must support the operation + if let TyData::Sum(lhs_types) = store.get_data(lhs) { + let mut results = Vec::new(); + for lt in lhs_types { + match binary_op_result_ty(op, lt, rhs, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + if let TyData::Sum(rhs_types) = store.get_data(rhs) { + let mut results = Vec::new(); + for rt in rhs_types { + match binary_op_result_ty(op, lhs, rt, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + // Get type data for matching + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + match op { + // Arithmetic: (Number, Number) -> Number + BinaryOperatorKind::Minus + | BinaryOperatorKind::Mul + | BinaryOperatorKind::Div + | BinaryOperatorKind::Modulo => { + if is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { + Ok(Ty::NUMBER) + } else { + Err(BinaryOpTypeError::RequiresNumberPair) + } + } + + // Plus: overloaded for number, string, char, array, tuple, object + BinaryOperatorKind::Plus => match (&lhs_data, &rhs_data) { + (d1, d2) if is_number_ty(d1) && is_number_ty(d2) => Ok(Ty::NUMBER), + ( + TyData::String | TyData::Char | TyData::LiteralString(_), + TyData::String | TyData::Char | 
TyData::LiteralString(_), + ) => Ok(Ty::STRING), + (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. }) => { + let elem = store.union(vec![*l, *r]); + Ok(store.array(elem)) + } + (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { + // Concatenate tuple element types + let mut elements = l.clone(); + elements.extend(r.iter().copied()); + Ok(store.tuple(elements)) + } + (TyData::Array { elem: a, .. }, TyData::Tuple { elems: t }) + | (TyData::Tuple { elems: t }, TyData::Array { elem: a, .. }) => { + // Mixed array/tuple concatenation - result is array + let mut types = t.clone(); + types.push(*a); + let elem = store.union(types); + Ok(store.array(elem)) + } + (TyData::Object(l), TyData::Object(r)) => Ok(store.object(ObjectData::merge(l, r))), + (TyData::AttrsOf { value: l }, TyData::AttrsOf { value: r }) => { + let value = store.union(vec![*l, *r]); + Ok(store.attrs_of(value)) + } + (TyData::Object(obj), TyData::AttrsOf { value }) + | (TyData::AttrsOf { value }, TyData::Object(obj)) => { + // Merge object with attrs-of: result is object with fields widened + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let widened_ty = store.union(vec![field.ty, *value]); + ( + name.clone(), + FieldDefInterned { + ty: widened_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + Ok(store.object(ObjectData { + fields, + has_unknown: true, // AttrsOf adds unknown fields + })) + } + _ => Err(BinaryOpTypeError::InvalidPlusOperands), + }, + + // Logical: (Bool, Bool) -> Bool (though Jsonnet actually allows any types) + BinaryOperatorKind::And | BinaryOperatorKind::Or => { + // In Jsonnet, && and || work on any types (short-circuit) + // But we can warn if operands aren't boolean + if is_bool_ty(&lhs_data) && is_bool_ty(&rhs_data) { + Ok(Ty::BOOL) + } else { + // Jsonnet allows this but returns one of the operands + Ok(store.union(vec![lhs, rhs])) + } + } + + // Bitwise: (Number, Number) -> Number + 
BinaryOperatorKind::BitAnd + | BinaryOperatorKind::BitOr + | BinaryOperatorKind::BitXor + | BinaryOperatorKind::Lhs + | BinaryOperatorKind::Rhs => { + if is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { + Ok(Ty::NUMBER) + } else { + Err(BinaryOpTypeError::RequiresBitwiseNumberPair) + } + } + + // Comparison: any types are valid, returns Bool + BinaryOperatorKind::Eq + | BinaryOperatorKind::Ne + | BinaryOperatorKind::Lt + | BinaryOperatorKind::Gt + | BinaryOperatorKind::Le + | BinaryOperatorKind::Ge => Ok(Ty::BOOL), + + // In: (String, Object) -> Bool + BinaryOperatorKind::InKw => { + let lhs_is_string = matches!( + lhs_data, + TyData::String | TyData::Char | TyData::LiteralString(_) + ); + let rhs_is_object = matches!(rhs_data, TyData::Object(_) | TyData::AttrsOf { .. }); + if lhs_is_string && rhs_is_object { + Ok(Ty::BOOL) + } else { + Err(BinaryOpTypeError::RequiresStringAndObject) + } + } + + // Null coalesce: any types, returns union + BinaryOperatorKind::NullCoaelse => Ok(store.union(vec![lhs, rhs])), + + // Internal/error operators - treat as Any + BinaryOperatorKind::MetaObjectApply | BinaryOperatorKind::ErrorNoOperator => Ok(Ty::ANY), + } +} + +/// Check if a unary operation is valid and return the result type. +/// +/// Returns `Ok(result_ty)` if the operation is valid for the given operand type, +/// or `Err(error_kind)` if the operation is invalid. +/// +/// # Errors +/// Returns `Err` when the operand type does not support the requested operator. +pub fn unary_op_result_ty( + op: UnaryOperatorKind, + operand: Ty, + store: &mut S, +) -> Result { + // Any, Never short-circuit + if operand.is_any() { + return Ok(Ty::ANY); + } + if operand.is_never() { + return Ok(Ty::NEVER); + } + + // Check for TypeVar + if let TyData::TypeVar { .. 
} = store.get_data(operand) { + return Ok(Ty::ANY); + } + + // Handle union types + if let TyData::Union(types) = store.get_data(operand) { + let mut results = Vec::new(); + for t in types { + match unary_op_result_ty(op, t, store) { + Ok(r) => results.push(r), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + let operand_data = store.get_data(operand); + + match op { + UnaryOperatorKind::Not => { + if is_bool_ty(&operand_data) { + Ok(Ty::BOOL) + } else { + Err(UnaryOpTypeError::NotRequiresBoolean) + } + } + UnaryOperatorKind::Minus => { + if is_number_ty(&operand_data) { + Ok(Ty::NUMBER) + } else { + Err(UnaryOpTypeError::MinusRequiresNumber) + } + } + UnaryOperatorKind::BitNot => { + if is_number_ty(&operand_data) { + Ok(Ty::NUMBER) + } else { + Err(UnaryOpTypeError::BitNotRequiresNumber) + } + } + } +} + +/// Helper to check if `TyData` represents a number type. +fn is_number_ty(data: &TyData) -> bool { + matches!(data, TyData::Number | TyData::BoundedNumber(_)) +} + +/// Helper to check if `TyData` represents a boolean type. +fn is_bool_ty(data: &TyData) -> bool { + matches!(data, TyData::Bool | TyData::True | TyData::False) +} + +/// Concatenate two arrays or tuples. +/// +/// Returns the type of the concatenated result. +pub fn array_concat_ty(left: Ty, right: Ty, store: &mut S) -> Ty { + let left_data = store.get_data(left); + let right_data = store.get_data(right); + + match (left_data, right_data) { + (TyData::Tuple { elems: a }, TyData::Tuple { elems: b }) => { + let mut elements = a; + elements.extend(b); + store.tuple(elements) + } + (TyData::Array { elem: a, .. }, TyData::Array { elem: b, .. }) => { + let elem = store.union(vec![a, b]); + store.array(elem) + } + (TyData::Tuple { elems: t }, TyData::Array { elem: a, .. }) + | (TyData::Array { elem: a, .. 
}, TyData::Tuple { elems: t }) => { + let mut types = t; + types.push(a); + let elem = store.union(types); + store.array(elem) + } + _ => store.array(Ty::ANY), + } +} + +// ============================================================================= + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::TyStore; + + #[test] + fn test_binary_op_valid_number_plus_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::NUMBER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_binary_op_valid_string_plus_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::STRING, Ty::STRING, &mut store); + assert_eq!(result, Ok(Ty::STRING)); + } + + #[test] + fn test_binary_op_invalid_string_plus_number_ty() { + let mut store = TyStore::new(); + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::STRING, Ty::NUMBER, &mut store) + .expect_err("String + Number is invalid"); + } + + #[test] + fn test_binary_op_comparison_returns_bool_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Lt, Ty::NUMBER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_with_any_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty(BinaryOperatorKind::Plus, Ty::ANY, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::ANY)); + } + + #[test] + fn test_binary_op_with_never_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::NEVER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NEVER)); + } + + #[test] + fn test_unary_op_valid_not_bool_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::Not, Ty::BOOL, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_unary_op_invalid_not_number_ty() { + 
let mut store = TyStore::new(); + unary_op_result_ty(UnaryOperatorKind::Not, Ty::NUMBER, &mut store) + .expect_err("!Number is invalid"); + } + + #[test] + fn test_unary_op_minus_number_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::Minus, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_unary_op_bitnot_number_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::BitNot, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_array_concat_tuples_ty() { + let mut store = TyStore::new(); + let left = store.tuple(vec![Ty::NUMBER]); + let right = store.tuple(vec![Ty::STRING]); + let result = array_concat_ty(left, right, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); + }); + } + + #[test] + fn test_array_concat_arrays_ty() { + let mut store = TyStore::new(); + let left = store.array(Ty::NUMBER); + let right = store.array(Ty::STRING); + let result = array_concat_ty(left, right, &mut store); + assert_matches!(store.get_data(result), TyData::Array { .. 
}); + } + + #[test] + fn test_binary_op_union_lhs_ty() { + let mut store = TyStore::new(); + // (Number | String) + Number should fail (String + Number invalid) + let union_ty = store.union(vec![Ty::NUMBER, Ty::STRING]); + binary_op_result_ty(BinaryOperatorKind::Plus, union_ty, Ty::NUMBER, &mut store) + .expect_err("(Number|String) + Number is invalid because String+Number fails"); + } + + #[test] + fn test_binary_op_union_valid_ty() { + let mut store = TyStore::new(); + // (Number | Number) + Number should succeed + let union_ty = store.union(vec![Ty::NUMBER, Ty::NUMBER]); + binary_op_result_ty(BinaryOperatorKind::Plus, union_ty, Ty::NUMBER, &mut store) + .expect("(Number|Number) + Number should succeed"); + } + + #[test] + fn test_binary_op_in_ty() { + let mut store = TyStore::new(); + let obj_ty = store.object(ObjectData { + fields: vec![], + has_unknown: true, + }); + let result = binary_op_result_ty(BinaryOperatorKind::InKw, Ty::STRING, obj_ty, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_bitwise_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty( + BinaryOperatorKind::BitAnd, + Ty::NUMBER, + Ty::NUMBER, + &mut store, + ); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_binary_op_logical_bool_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty(BinaryOperatorKind::And, Ty::BOOL, Ty::BOOL, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_null_coalesce_ty() { + let mut store = TyStore::new(); + let result_ty = binary_op_result_ty( + BinaryOperatorKind::NullCoaelse, + Ty::NUMBER, + Ty::STRING, + &mut store, + ) + .expect("null coalesce should succeed"); + // Result should be union of both types + assert_matches!(store.get_data(result_ty), TyData::Union(_)); + } +} diff --git a/crates/jrsonnet-lsp-types/src/semantic_tokens.rs b/crates/jrsonnet-lsp-types/src/semantic_tokens.rs new file mode 100644 index 00000000..b0e47c68 --- 
/dev/null +++ b/crates/jrsonnet-lsp-types/src/semantic_tokens.rs @@ -0,0 +1,167 @@ +//! Shared semantic token type/modifier names used by the LSP stack. + +use std::str::FromStr; + +/// Semantic token type with compile-time index. +/// +/// The enum values match indices in the semantic token legend type list. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(u32)] +pub enum SemanticTokenTypeName { + Namespace = 0, + Parameter = 7, + Variable = 8, + Property = 9, + Function = 12, + Method = 13, + Keyword = 15, + Comment = 17, + String = 18, + Number = 19, + Operator = 21, +} + +impl SemanticTokenTypeName { + #[must_use] + pub const fn as_index(self) -> u32 { + self as u32 + } + + #[must_use] + pub const fn from_repr(value: u32) -> Option { + match value { + 0 => Some(Self::Namespace), + 7 => Some(Self::Parameter), + 8 => Some(Self::Variable), + 9 => Some(Self::Property), + 12 => Some(Self::Function), + 13 => Some(Self::Method), + 15 => Some(Self::Keyword), + 17 => Some(Self::Comment), + 18 => Some(Self::String), + 19 => Some(Self::Number), + 21 => Some(Self::Operator), + _ => None, + } + } + + #[must_use] + pub const fn as_str(self) -> &'static str { + match self { + Self::Namespace => "namespace", + Self::Parameter => "parameter", + Self::Variable => "variable", + Self::Property => "property", + Self::Function => "function", + Self::Method => "method", + Self::Keyword => "keyword", + Self::Comment => "comment", + Self::String => "string", + Self::Number => "number", + Self::Operator => "operator", + } + } +} + +impl FromStr for SemanticTokenTypeName { + type Err = (); + + fn from_str(value: &str) -> Result { + match value.to_ascii_lowercase().as_str() { + "namespace" => Ok(Self::Namespace), + "parameter" => Ok(Self::Parameter), + "variable" => Ok(Self::Variable), + "property" => Ok(Self::Property), + "function" => Ok(Self::Function), + "method" => Ok(Self::Method), + "keyword" => Ok(Self::Keyword), + "comment" => Ok(Self::Comment), + "string" => 
Ok(Self::String), + "number" => Ok(Self::Number), + "operator" => Ok(Self::Operator), + _ => Err(()), + } + } +} + +/// Semantic token modifier names with their corresponding LSP bit flags. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(u32)] +pub enum SemanticTokenModifierName { + Declaration = 0, + Definition = 1, + Readonly = 2, + Static = 3, + Deprecated = 4, + Abstract = 5, + Async = 6, + Modification = 7, + Documentation = 8, + DefaultLibrary = 9, +} + +impl SemanticTokenModifierName { + #[must_use] + pub const fn as_index(self) -> u32 { + self as u32 + } + + #[must_use] + pub const fn as_bitset(self) -> u32 { + 1 << self.as_index() + } + + #[must_use] + pub const fn from_repr(value: u32) -> Option { + match value { + 0 => Some(Self::Declaration), + 1 => Some(Self::Definition), + 2 => Some(Self::Readonly), + 3 => Some(Self::Static), + 4 => Some(Self::Deprecated), + 5 => Some(Self::Abstract), + 6 => Some(Self::Async), + 7 => Some(Self::Modification), + 8 => Some(Self::Documentation), + 9 => Some(Self::DefaultLibrary), + _ => None, + } + } + + #[must_use] + pub const fn as_str(self) -> &'static str { + match self { + Self::Declaration => "declaration", + Self::Definition => "definition", + Self::Readonly => "readonly", + Self::Static => "static", + Self::Deprecated => "deprecated", + Self::Abstract => "abstract", + Self::Async => "async", + Self::Modification => "modification", + Self::Documentation => "documentation", + Self::DefaultLibrary => "default-library", + } + } +} + +impl FromStr for SemanticTokenModifierName { + type Err = (); + + fn from_str(value: &str) -> Result { + let normalized = value.to_ascii_lowercase().replace('-', "_"); + match normalized.as_str() { + "declaration" => Ok(Self::Declaration), + "definition" => Ok(Self::Definition), + "readonly" => Ok(Self::Readonly), + "static" => Ok(Self::Static), + "deprecated" => Ok(Self::Deprecated), + "abstract" => Ok(Self::Abstract), + "async" => Ok(Self::Async), + "modification" => 
Ok(Self::Modification), + "documentation" => Ok(Self::Documentation), + "default_library" | "defaultlibrary" => Ok(Self::DefaultLibrary), + _ => Err(()), + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs new file mode 100644 index 00000000..18581399 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -0,0 +1,880 @@ +//! Interned type storage for efficient type representation. +//! +//! This module provides type interning - storing types once and referencing them +//! by small integer IDs. This provides: +//! +//! - **Memory efficiency**: Identical types are stored only once +//! - **O(1) equality**: Comparing types is just integer comparison +//! - **Free cloning**: `Ty` is `Copy`, no heap allocation needed +//! - **Cache-friendly**: Types stored contiguously in `Vec` +//! +//! # Architecture +//! +//! The type system uses three main components: +//! +//! - [`Ty`]: A 4-byte type ID that references interned type data +//! - [`TyData`]: The actual type representation (primitives, compounds, etc.) +//! - [`TyStore`]: The storage that holds all interned types +//! +//! # Example +//! +//! ```ignore +//! let mut store = TyStore::new(); +//! +//! // Intern some types +//! let num = Ty::NUMBER; // Built-in constant +//! let arr = store.array(num); // Array +//! +//! // Same type gives same ID +//! let arr2 = store.array(num); +//! assert_eq!(arr, arr2); // O(1) comparison +//! +//! // Look up type data +//! let data = store.get(arr); +//! assert!(matches!(data, TyData::Array { .. })); +//! ``` + +use std::sync::atomic::{AtomicU32, Ordering}; + +use rustc_hash::FxHashMap; + +fn to_u32(value: usize) -> Option { + u32::try_from(value).ok() +} + +/// Global counter for generating unique type variable IDs. +static TYPE_VAR_COUNTER: AtomicU32 = AtomicU32::new(0); +static ANY_TY_DATA: TyData = TyData::Any; + +/// An interned type reference. 
+/// +/// This is a small (4 byte) identifier that references type data stored in a [`TyStore`]. +/// It implements `Copy`, so cloning is free (just copies 4 bytes). +/// +/// Type equality is O(1) - just comparing the internal ID. +/// +/// # Local vs Global Types +/// +/// The upper bit (bit 31) is the `LOCAL_BIT`: +/// - `LOCAL_BIT = 0`: Global type (in `GlobalTyStore`) +/// - `LOCAL_BIT = 1`: Local type (in `LocalTyStore`, temporary during analysis) +/// +/// After analysis, local types are merged into the global store via substitution. +/// +/// # Well-Known Types +/// +/// Common types have predefined constants for efficiency: +/// - [`Ty::ANY`], [`Ty::NEVER`] - top and bottom types +/// - [`Ty::NULL`], [`Ty::BOOL`], [`Ty::NUMBER`], [`Ty::STRING`] - primitives +/// - [`Ty::TRUE`], [`Ty::FALSE`] - boolean literals +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Ty(u32); + +impl Ty { + /// Bit flag indicating a local (per-file) type vs global type. + pub const LOCAL_BIT: u32 = 1 << 31; + + /// Mask for extracting the raw index without the local bit. + const INDEX_MASK: u32 = !Self::LOCAL_BIT; + + // Well-known type constants (indices 0-15 are reserved, always global) + /// The top type - any value is valid. + pub const ANY: Ty = Ty(0); + /// The bottom type - no value is valid (unreachable code). + pub const NEVER: Ty = Ty(1); + /// The null type. + pub const NULL: Ty = Ty(2); + /// Boolean type (true or false). + pub const BOOL: Ty = Ty(3); + /// The literal `true` value. + pub const TRUE: Ty = Ty(4); + /// The literal `false` value. + pub const FALSE: Ty = Ty(5); + /// Numeric type (any number). + pub const NUMBER: Ty = Ty(6); + /// String type. + pub const STRING: Ty = Ty(7); + /// Single character type (string of length 1). + pub const CHAR: Ty = Ty(8); + + /// Number of reserved well-known type slots. + pub const RESERVED_COUNT: u32 = 16; + + /// Create a Ty from a raw index (global, no local bit). 
+ #[inline] + #[must_use] + pub const fn from_raw(index: u32) -> Ty { + Ty(index & Self::INDEX_MASK) + } + + /// Create a local Ty from a raw index. + #[inline] + #[must_use] + pub const fn from_raw_local(index: u32) -> Ty { + Ty((index & Self::INDEX_MASK) | Self::LOCAL_BIT) + } + + /// Get the raw ID including the local bit (for debugging). + #[inline] + #[must_use] + pub fn id(self) -> u32 { + self.0 + } + + /// Get the raw index without the local bit. + #[inline] + #[must_use] + pub fn raw_index(self) -> u32 { + self.0 & Self::INDEX_MASK + } + + /// Check if this is a local (per-file) type. + #[inline] + #[must_use] + pub fn is_local(self) -> bool { + self.0 & Self::LOCAL_BIT != 0 + } + + /// Check if this is a global type. + #[inline] + #[must_use] + pub fn is_global(self) -> bool { + !self.is_local() + } + + /// Check if this is the Any type. + #[inline] + #[must_use] + pub fn is_any(self) -> bool { + self == Self::ANY + } + + /// Check if this is the Never type. + #[inline] + #[must_use] + pub fn is_never(self) -> bool { + self == Self::NEVER + } + + /// Check if this is a well-known type constant. + /// + /// Well-known types (ANY, NEVER, NULL, BOOL, TRUE, FALSE, NUMBER, STRING, CHAR) + /// have the same `Ty` value across all stores and are always global. + #[inline] + #[must_use] + pub fn is_well_known(self) -> bool { + // Well-known types are always global (no LOCAL_BIT) and have low indices + self.0 < Self::RESERVED_COUNT + } + + /// Get the well-known Ty for a `TyData`, if it matches a well-known type. 
+ #[must_use] + pub fn well_known_for_data(data: &TyData) -> Option { + match data { + TyData::Any => Some(Ty::ANY), + TyData::Never => Some(Ty::NEVER), + TyData::Null => Some(Ty::NULL), + TyData::Bool => Some(Ty::BOOL), + TyData::True => Some(Ty::TRUE), + TyData::False => Some(Ty::FALSE), + TyData::Number => Some(Ty::NUMBER), + TyData::String => Some(Ty::STRING), + TyData::Char => Some(Ty::CHAR), + _ => None, + } + } +} + +/// Error returned when converting a local [`Ty`] to a [`GlobalTy`]. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct NotGlobalTy(Ty); + +impl NotGlobalTy { + /// Return the original type that failed conversion. + #[must_use] + pub const fn ty(self) -> Ty { + self.0 + } +} + +impl std::fmt::Display for NotGlobalTy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "expected a global type, got local {:?}", self.0) + } +} + +impl std::error::Error for NotGlobalTy {} + +/// A type known to be global (stored in [`crate::global_store::GlobalTyStore`]). +/// +/// This wrapper prevents APIs that cross analysis boundaries from accidentally +/// accepting local (per-analysis) type IDs. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct GlobalTy(Ty); + +impl GlobalTy { + /// Global top type. + pub const ANY: Self = Self(Ty::ANY); + /// Global bottom type. + pub const NEVER: Self = Self(Ty::NEVER); + /// Global null type. + pub const NULL: Self = Self(Ty::NULL); + /// Global boolean type. + pub const BOOL: Self = Self(Ty::BOOL); + /// Global literal `true` type. + pub const TRUE: Self = Self(Ty::TRUE); + /// Global literal `false` type. + pub const FALSE: Self = Self(Ty::FALSE); + /// Global number type. + pub const NUMBER: Self = Self(Ty::NUMBER); + /// Global string type. + pub const STRING: Self = Self(Ty::STRING); + /// Global character type. + pub const CHAR: Self = Self(Ty::CHAR); + + /// Construct a [`GlobalTy`] if the provided type is global. 
+ #[must_use] + pub fn new(ty: Ty) -> Option { + if ty.is_global() { + Some(Self(ty)) + } else { + None + } + } + + /// Return the underlying [`Ty`]. + #[must_use] + pub const fn as_ty(self) -> Ty { + self.0 + } + + /// Return the raw global index. + #[must_use] + pub fn raw_index(self) -> u32 { + self.0.raw_index() + } +} + +impl std::fmt::Debug for GlobalTy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +impl std::fmt::Display for GlobalTy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } +} + +impl PartialEq for GlobalTy { + fn eq(&self, other: &Ty) -> bool { + self.0 == *other + } +} + +impl PartialEq for Ty { + fn eq(&self, other: &GlobalTy) -> bool { + *self == other.0 + } +} + +impl TryFrom for GlobalTy { + type Error = NotGlobalTy; + + /// # Errors + /// + /// Returns [`NotGlobalTy`] if `value` is a local type ID. + fn try_from(value: Ty) -> Result { + Self::new(value).ok_or(NotGlobalTy(value)) + } +} + +impl From for Ty { + fn from(value: GlobalTy) -> Self { + value.0 + } +} + +impl std::fmt::Debug for Ty { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match *self { + Ty::ANY => write!(f, "Ty::ANY"), + Ty::NEVER => write!(f, "Ty::NEVER"), + Ty::NULL => write!(f, "Ty::NULL"), + Ty::BOOL => write!(f, "Ty::BOOL"), + Ty::TRUE => write!(f, "Ty::TRUE"), + Ty::FALSE => write!(f, "Ty::FALSE"), + Ty::NUMBER => write!(f, "Ty::NUMBER"), + Ty::STRING => write!(f, "Ty::STRING"), + Ty::CHAR => write!(f, "Ty::CHAR"), + ty if ty.is_local() => write!(f, "Ty(L{})", ty.raw_index()), + Ty(id) => write!(f, "Ty({id})"), + } + } +} + +/// A reference to type data with access to the store for display. +/// +/// This wrapper holds both a reference to the type data and the store, +/// enabling `Display` implementation that can recursively format nested types. +/// +/// Implements `Deref` for ergonomic access to the underlying data. 
+/// +/// # Example +/// ```ignore +/// let store = TyStore::new(); +/// let arr = store.array(Ty::NUMBER); +/// +/// // Display works directly +/// println!("{}", store.get(arr)); // "array" +/// +/// // Pattern matching via Deref +/// match &*store.get(arr) { +/// TyData::Array { elem } => println!("element: {}", store.get(*elem)), +/// _ => {} +/// } +/// ``` +pub struct TyRef<'a> { + store: &'a TyStore, + ty: Ty, +} + +impl<'a> TyRef<'a> { + /// Get the Ty ID this reference points to. + #[must_use] + pub fn ty(&self) -> Ty { + self.ty + } + + /// Get a reference to another type from the same store. + #[must_use] + pub fn get(&self, ty: Ty) -> TyRef<'a> { + TyRef { + store: self.store, + ty, + } + } +} + +impl std::ops::Deref for TyRef<'_> { + type Target = TyData; + + fn deref(&self) -> &Self::Target { + self.store + .data + .get(self.ty.raw_index() as usize) + .or_else(|| self.store.data.first()) + .unwrap_or(&ANY_TY_DATA) + } +} + +impl std::fmt::Display for TyRef<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.store.display(self.ty)) + } +} + +impl std::fmt::Debug for TyRef<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TyRef") + .field("ty", &self.ty) + .field("data", &**self) + .finish() + } +} + +/// Unique identifier for a type variable. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TyVarId(pub u32); + +impl TyVarId { + /// Generate a fresh type variable ID. 
+ pub fn fresh() -> Self { + TyVarId(TYPE_VAR_COUNTER.fetch_add(1, Ordering::SeqCst)) + } +} + +impl std::fmt::Display for TyVarId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + // Display as Greek letters for small IDs, T prefix for larger + let greek = ['α', 'β', 'γ', 'δ', 'ε', 'ζ', 'η', 'θ']; + if let Some(letter) = greek.get(self.0 as usize) { + return write!(f, "{letter}"); + } + write!(f, "T{}", self.0) + } +} + +/// Numeric bounds for range validation. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct NumBounds { + /// Minimum value (as bits for hashing), if known. + pub min: Option, + /// Maximum value (as bits for hashing), if known. + pub max: Option, +} + +impl NumBounds { + /// Create unbounded numeric range. + #[must_use] + pub fn unbounded() -> Self { + Self { + min: None, + max: None, + } + } + + /// Create a non-negative range (>= 0). + #[must_use] + pub fn non_negative() -> Self { + Self { + min: Some(0.0_f64.to_bits()), + max: None, + } + } + + /// Create a range with minimum bound. + #[must_use] + pub fn at_least(min: f64) -> Self { + Self { + min: Some(min.to_bits()), + max: None, + } + } + + /// Create a range with both bounds. + #[must_use] + pub fn between(min: f64, max: f64) -> Self { + Self { + min: Some(min.to_bits()), + max: Some(max.to_bits()), + } + } + + /// Get the minimum as f64. + pub fn min_f64(&self) -> Option { + self.min.map(f64::from_bits) + } + + /// Get the maximum as f64. + pub fn max_f64(&self) -> Option { + self.max.map(f64::from_bits) + } + + /// Check if this range is a subset of another range. 
+ #[must_use] + pub fn is_subset_of(&self, other: &NumBounds) -> bool { + let self_min = self.min_f64(); + let self_max = self.max_f64(); + let other_min = other.min_f64(); + let other_max = other.max_f64(); + + // Our min must be >= their min (or they have no min) + let min_ok = match (self_min, other_min) { + (_, None) => true, + (Some(a), Some(b)) => a >= b, + (None, Some(_)) => false, // We go to -inf, they don't + }; + // Our max must be <= their max (or they have no max) + let max_ok = match (self_max, other_max) { + (_, None) => true, + (Some(a), Some(b)) => a <= b, + (None, Some(_)) => false, // We go to +inf, they don't + }; + min_ok && max_ok + } + + /// Intersect two numeric bounds, returning the tighter constraint. + /// + /// Returns `None` if the intersection is empty (min > max). + pub fn intersect(&self, other: &NumBounds) -> Option { + // Take the maximum of the minimums (tighter lower bound) + let new_min = match (self.min_f64(), other.min_f64()) { + (Some(a), Some(b)) => Some(a.max(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + + // Take the minimum of the maximums (tighter upper bound) + let new_max = match (self.max_f64(), other.max_f64()) { + (Some(a), Some(b)) => Some(a.min(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + + // Check if the intersection is empty + if let (Some(min), Some(max)) = (new_min, new_max) { + if min > max { + return None; // Empty intersection + } + } + + Some(NumBounds { + min: new_min.map(f64::to_bits), + max: new_max.map(f64::to_bits), + }) + } +} + +/// Visibility of an object field. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] +pub enum FieldVis { + /// Normal field (`:`) - visible in output + #[default] + Normal, + /// Hidden field (`::`) - not visible in output + Hidden, + /// Force visible field (`:::`) - always visible + ForceVisible, +} + +/// Definition of an object field in interned form. 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FieldDefInterned { + /// The type of the field value. + pub ty: Ty, + /// Whether this field is required. + pub required: bool, + /// Visibility of the field. + pub visibility: FieldVis, +} + +/// Object type data. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ObjectData { + /// Known fields (sorted by name for canonical form). + pub fields: Vec<(String, FieldDefInterned)>, + /// Whether the object may have unknown fields. + pub has_unknown: bool, +} + +impl ObjectData { + /// Create an empty closed object. + #[must_use] + pub fn empty() -> Self { + Self { + fields: Vec::new(), + has_unknown: false, + } + } + + /// Create an open object (may have unknown fields). + #[must_use] + pub fn open() -> Self { + Self { + fields: Vec::new(), + has_unknown: true, + } + } + + /// Get a field by name. + #[must_use] + pub fn get_field(&self, name: &str) -> Option<&FieldDefInterned> { + self.fields.iter().find(|(n, _)| n == name).map(|(_, f)| f) + } + + /// Merge two objects (right fields override left). + #[must_use] + pub fn merge(left: &ObjectData, right: &ObjectData) -> Self { + let mut fields = left.fields.clone(); + // Add/override fields from right + for (name, field) in &right.fields { + if let Some((_, existing)) = fields.iter_mut().find(|(n, _)| n == name) { + *existing = field.clone(); + } else { + fields.push((name.clone(), field.clone())); + } + } + // Sort for canonical form + fields.sort_by(|(a, _), (b, _)| a.cmp(b)); + Self { + fields, + has_unknown: left.has_unknown || right.has_unknown, + } + } +} + +/// Function parameter in interned form. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ParamInterned { + /// Parameter name. + pub name: String, + /// Expected parameter type. + pub ty: Ty, + /// Whether the parameter has a default value. + pub has_default: bool, +} + +/// How a function's return type is determined. 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ReturnSpec { + /// Fixed return type. + Fixed(Ty), + /// Return type is same as argument at index. + SameAsArg(usize), + /// Return type is array of argument type at index. + ArrayOfArg(usize), + /// Return type keeps element type of array arg. + ArrayWithSameElements(usize), + /// Return type is a set (sorted, unique array) with same element type as array arg. + SetWithSameElements(usize), + /// Return type is function's return type at index. + FuncReturnType(usize), + /// Return type is Array. + ArrayOfFuncReturn(usize), + /// Return type is flattened array from func. + FlatMapResult(usize), + /// Return type is non-negative number. + NonNegative, + /// Return type is array of object values. + ObjectValuesType(usize), +} + +impl Default for ReturnSpec { + fn default() -> Self { + ReturnSpec::Fixed(Ty::ANY) + } +} + +/// Function type data. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FunctionData { + /// Function parameters. + pub params: Vec, + /// How return type is determined. + pub return_spec: ReturnSpec, + /// Whether function is variadic. + pub variadic: bool, +} + +impl FunctionData { + /// Count of required parameters. + #[must_use] + pub fn required_count(&self) -> usize { + self.params.iter().filter(|p| !p.has_default).count() + } + + /// Total parameter count. + #[must_use] + pub fn total_count(&self) -> usize { + self.params.len() + } +} + +/// Constraints on a type variable. +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] +pub struct TyConstraints { + /// Must be indexable (array, object, string). + pub must_be_indexable: bool, + /// Must support field access (object). + pub must_support_fields: bool, + /// Must be callable (function). + pub must_be_callable: bool, + /// Upper bound type (must be subtype of this). + pub upper_bound: Option, +} + +impl TyConstraints { + /// No constraints. 
+ #[must_use] + pub fn none() -> Self { + Self::default() + } + + /// Check if there are any constraints. + #[must_use] + pub fn is_empty(&self) -> bool { + !self.must_be_indexable + && !self.must_support_fields + && !self.must_be_callable + && self.upper_bound.is_none() + } + + /// Check if a type satisfies these constraints. + #[must_use] + pub fn satisfied_by(&self, ty: Ty, store: &TyStore) -> bool { + if self.must_be_indexable && !store.is_indexable(ty) { + return false; + } + if self.must_support_fields && !store.supports_field_access(ty) { + return false; + } + if self.must_be_callable && !store.is_callable(ty) { + return false; + } + // Check upper bound + if let Some(upper) = self.upper_bound { + if !store.is_subtype_of(ty, upper) { + return false; + } + } + true + } +} + +/// A substitution mapping type variable IDs to concrete types (Ty-native). +#[derive(Debug, Clone, Default)] +pub struct TySubstitution { + /// Mapping from type variable IDs to their substituted types. + pub mappings: FxHashMap, +} + +impl TySubstitution { + /// Create an empty substitution. + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Add a mapping from a type variable to a type. + pub fn insert(&mut self, var: TyVarId, ty: Ty) { + self.mappings.insert(var, ty); + } + + /// Look up a type variable's substitution. + #[must_use] + pub fn get(&self, var: TyVarId) -> Option { + self.mappings.get(&var).copied() + } + + /// Check if a type variable ID occurs in a type. + #[must_use] + pub fn occurs_in(var: TyVarId, ty: Ty, store: &TyStore) -> bool { + match *store.get(ty) { + TyData::TypeVar { id, .. } => id == var, + TyData::Array { elem, .. 
} => Self::occurs_in(var, elem, store), + TyData::Tuple { ref elems } => elems.iter().any(|e| Self::occurs_in(var, *e, store)), + TyData::Union(ref variants) => variants.iter().any(|v| Self::occurs_in(var, *v, store)), + TyData::Object(ref obj) => obj + .fields + .iter() + .any(|(_, fd)| Self::occurs_in(var, fd.ty, store)), + TyData::AttrsOf { value } => Self::occurs_in(var, value, store), + TyData::Function(ref func) => { + func.params + .iter() + .any(|p| Self::occurs_in(var, p.ty, store)) + || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if Self::occurs_in(var, *ret, store)) + } + _ => false, + } + } +} + +/// The actual type data that a [`Ty`] references. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TyData { + /// Top type - any value. + Any, + /// Bottom type - no value. + Never, + /// Null value. + Null, + /// Boolean (true or false). + Bool, + /// Literal true. + True, + /// Literal false. + False, + /// Number (unbounded). + Number, + /// Number with bounds. + BoundedNumber(NumBounds), + /// String. + String, + /// Single character. + Char, + /// Literal string value. + LiteralString(String), + + /// Array with element type and optional set semantics. + /// + /// When `is_set` is true, the array is known to be sorted and contain unique elements. + /// This is useful for tracking the result of `std.set()` calls. + Array { elem: Ty, is_set: bool }, + /// Tuple with fixed element types. + Tuple { elems: Vec }, + /// Object with known fields. + Object(ObjectData), + /// Object where all values have same type. + AttrsOf { value: Ty }, + /// Function type. + Function(FunctionData), + + /// Union of types (value matches ANY). + Union(Vec), + /// Intersection of types (value matches ALL). + Sum(Vec), + + /// Type variable with constraints. + TypeVar { + id: TyVarId, + constraints: TyConstraints, + }, +} + +/// Common operations for type stores. 
+/// +/// This trait abstracts over `TyStore` and `MutStore`, allowing unification +/// and type operation code to work with either store type without duplication. +pub trait TypeStoreOps { + /// Get type data for a Ty (returns owned `TyData` for simplicity). + fn get_data(&self, ty: Ty) -> TyData; + + /// Format a type for display. + fn display(&self, ty: Ty) -> String; + + /// Create an array type. + fn array(&mut self, elem: Ty) -> Ty; + + /// Create a set type (array with sorted, unique elements). + fn array_set(&mut self, elem: Ty) -> Ty; + + /// Create a tuple type. + fn tuple(&mut self, elems: Vec) -> Ty; + + /// Create an object type. + fn object(&mut self, data: ObjectData) -> Ty; + + /// Create an `AttrsOf` type (object with uniform value type). + fn attrs_of(&mut self, value: Ty) -> Ty; + + /// Create a function type. + fn function(&mut self, data: FunctionData) -> Ty; + + /// Create a union type. + fn union(&mut self, types: Vec) -> Ty; + + /// Create a sum (intersection) type. + fn sum(&mut self, types: Vec) -> Ty; + + /// Create a bounded number type. + fn bounded_number(&mut self, bounds: NumBounds) -> Ty; + + /// Create a literal string type. + fn literal_string(&mut self, s: String) -> Ty; +} + +/// Storage for interned types. +/// +/// Types are stored once and referenced by [`Ty`] IDs. +/// The store ensures deduplication - the same type data always +/// maps to the same ID. +#[derive(Debug, Clone)] +pub struct TyStore { + /// Type data indexed by Ty ID. + data: Vec, + /// Reverse mapping for deduplication. 
+ dedup: FxHashMap, +} + +mod impl_analysis; +mod impl_core; +mod impl_transform; +mod ops_impl; +mod thread_local_store; + +pub use thread_local_store::{reset_store, with_store}; diff --git a/crates/jrsonnet-lsp-types/src/store/impl_analysis.rs b/crates/jrsonnet-lsp-types/src/store/impl_analysis.rs new file mode 100644 index 00000000..4f766df6 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/impl_analysis.rs @@ -0,0 +1,949 @@ +use super::*; + +impl TyStore { + /// Narrow a type by intersecting with a constraint. + /// + /// Returns the most specific type that satisfies both. + /// For example, `narrow(Any, Number)` returns `Number`. + pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { + crate::operations::ty_and(ty, constraint, self) + } + + /// Widen a type by removing a constraint. + /// + /// Returns the type with the constraint removed. + /// For example, `widen(Union(Number, String), Number)` returns `String`. + pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { + crate::operations::ty_minus(base, remove, self) + } + + /// Narrow a type to one with a specific length. + /// + /// - Arrays become tuples with n elements + /// - Strings with length 1 become Char + /// - Tuples must have matching length + pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { + crate::operations::ty_with_len(ty, len, self) + } + + /// Narrow a type to one with at least a minimum length. + pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { + crate::operations::ty_with_min_len(ty, min, self) + } + + /// Check if type is indexable. + #[must_use] + pub fn is_indexable(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any + | TyData::String + | TyData::Char + | TyData::Array { .. } + | TyData::Tuple { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.is_indexable(t)) + } + TyData::TypeVar { + ref constraints, .. 
+ } => constraints.must_be_indexable, + _ => false, + } + } + + /// Check if type supports field access. + #[must_use] + pub fn supports_field_access(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.supports_field_access(t)) + } + TyData::TypeVar { + ref constraints, .. + } => constraints.must_support_fields, + _ => false, + } + } + + /// Check if type is callable. + #[must_use] + pub fn is_callable(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any | TyData::Function(_) => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.is_callable(t)) + } + TyData::TypeVar { + ref constraints, .. + } => constraints.must_be_callable, + _ => false, + } + } + + /// Simplified subtype check for constraint satisfaction. + /// + /// Checks if `subtype` is a subtype of `supertype`. This is a simplified + /// version that handles the most common cases; for full subtype checking + /// use the unification module. 
+ #[must_use] + pub fn is_subtype_of(&self, subtype: Ty, supertype: Ty) -> bool { + // Fast paths + if subtype == supertype { + return true; + } + if subtype == Ty::NEVER { + return true; // Never is subtype of everything + } + if supertype == Ty::ANY { + return true; // Everything is subtype of Any + } + if subtype == Ty::ANY { + return false; // Any is only subtype of Any (already checked) + } + + match (&*self.get(subtype), &*self.get(supertype)) { + // Char <: String + // LiteralString <: Char (if single char) + (TyData::LiteralString(s), TyData::Char) => s.chars().count() == 1, + // Char <: String + // LiteralString <: String + // True <: Bool, False <: Bool + // BoundedNumber <: Number + (TyData::Char | TyData::LiteralString(_), TyData::String) + | (TyData::True | TyData::False, TyData::Bool) + | (TyData::BoundedNumber(_), TyData::Number) => true, + // Array covariance: Array <: Array if A <: B + ( + TyData::Array { elem: sub_elem, .. }, + TyData::Array { + elem: super_elem, .. + }, + ) => self.is_subtype_of(*sub_elem, *super_elem), + // Tuple <: Array if all elements are subtypes + (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) => { + elems.iter().all(|&e| self.is_subtype_of(e, *arr_elem)) + } + // Union subtyping: all variants must be subtypes + (TyData::Union(variants), _) => { + let variants = variants.clone(); + variants.iter().all(|&v| self.is_subtype_of(v, supertype)) + } + // Subtype of union: must be subtype of some variant + (_, TyData::Union(variants)) => { + let variants = variants.clone(); + variants.iter().any(|&v| self.is_subtype_of(subtype, v)) + } + // Default: not a subtype + _ => false, + } + } + + /// Apply a substitution to a type, replacing all type variables. + pub fn apply_substitution(&mut self, ty: Ty, sub: &TySubstitution) -> Ty { + // Clone the data to avoid borrow issues + let data = self.get(ty).clone(); + match data { + TyData::TypeVar { id, .. 
} => sub.get(id).unwrap_or(ty), + TyData::Array { elem, is_set } => { + let new_elem = self.apply_substitution(elem, sub); + if is_set { + self.array_set(new_elem) + } else { + self.array(new_elem) + } + } + TyData::Tuple { elems } => { + // Collect elements first to avoid closure borrowing issues + let elems_vec: Vec = elems; + let mut new_elems = Vec::with_capacity(elems_vec.len()); + for e in elems_vec { + new_elems.push(self.apply_substitution(e, sub)); + } + self.tuple(new_elems) + } + TyData::Union(variants) => { + let variants_vec: Vec = variants; + let mut new_variants = Vec::with_capacity(variants_vec.len()); + for v in variants_vec { + new_variants.push(self.apply_substitution(v, sub)); + } + self.union(new_variants) + } + TyData::Sum(variants) => { + let variants_vec: Vec = variants; + let mut new_variants = Vec::with_capacity(variants_vec.len()); + for v in variants_vec { + new_variants.push(self.apply_substitution(v, sub)); + } + self.sum(new_variants) + } + TyData::Object(obj) => { + // Extract field info first + let field_info: Vec<_> = obj + .fields + .iter() + .map(|(name, fd)| (name.clone(), fd.ty, fd.required, fd.visibility)) + .collect(); + let has_unknown = obj.has_unknown; + // Now apply substitutions + let mut new_fields = Vec::with_capacity(field_info.len()); + for (name, ty, required, visibility) in field_info { + new_fields.push(( + name, + FieldDefInterned { + ty: self.apply_substitution(ty, sub), + required, + visibility, + }, + )); + } + self.object(ObjectData { + fields: new_fields, + has_unknown, + }) + } + TyData::AttrsOf { value } => { + let new_value = self.apply_substitution(value, sub); + self.attrs_of(new_value) + } + TyData::Function(func) => { + // Extract param info first + let param_info: Vec<_> = func + .params + .iter() + .map(|p| (p.name.clone(), p.ty, p.has_default)) + .collect(); + let (old_return_spec, variadic) = (func.return_spec.clone(), func.variadic); + // Now apply substitutions + let mut new_params = 
Vec::with_capacity(param_info.len()); + for (name, ty, has_default) in param_info { + new_params.push(ParamInterned { + name, + ty: self.apply_substitution(ty, sub), + has_default, + }); + } + let new_return_spec = match old_return_spec { + ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply_substitution(ret, sub)), + other => other, + }; + self.intern(TyData::Function(FunctionData { + params: new_params, + return_spec: new_return_spec, + variadic, + })) + } + // Primitives and other types don't contain type variables + _ => ty, + } + } + + /// Check if a type has any type variables. + #[must_use] + pub fn has_type_vars(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::TypeVar { .. } => true, + TyData::Array { elem, .. } => self.has_type_vars(elem), + TyData::Tuple { ref elems } => elems.iter().any(|&e| self.has_type_vars(e)), + TyData::Union(ref variants) | TyData::Sum(ref variants) => { + variants.iter().any(|&v| self.has_type_vars(v)) + } + TyData::Object(ref obj) => obj.fields.iter().any(|(_, fd)| self.has_type_vars(fd.ty)), + TyData::AttrsOf { value } => self.has_type_vars(value), + TyData::Function(ref func) => { + func.params.iter().any(|p| self.has_type_vars(p.ty)) + || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if self.has_type_vars(*ret)) + } + _ => false, + } + } + + /// Format a type for display. 
+ #[must_use] + pub fn display(&self, ty: Ty) -> String { + match *self.get(ty) { + TyData::Any => "any".to_string(), + TyData::Never => "never".to_string(), + TyData::Null => "null".to_string(), + TyData::Bool => "boolean".to_string(), + TyData::True => "true".to_string(), + TyData::False => "false".to_string(), + TyData::Number => "number".to_string(), + TyData::BoundedNumber(bounds) => match (bounds.min_f64(), bounds.max_f64()) { + (None, None) => "number".to_string(), + (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { + if lo.fract() == 0.0 { + format!("{lo:.0}") + } else { + format!("{lo}") + } + } + (Some(lo), Some(hi)) => format!("number[{lo}..{hi}]"), + (Some(lo), None) => format!("number[{lo}..]"), + (None, Some(hi)) => format!("number[..{hi}]"), + }, + TyData::String => "string".to_string(), + TyData::Char => "char".to_string(), + TyData::LiteralString(ref s) => format!("\"{s}\""), + TyData::Array { elem, is_set } => { + if is_set { + format!("set<{}>", self.display(elem)) + } else { + format!("array<{}>", self.display(elem)) + } + } + TyData::Tuple { ref elems } => { + let types: Vec<_> = elems.iter().map(|&t| self.display(t)).collect(); + format!("[{}]", types.join(", ")) + } + TyData::Object(ref obj) => { + if obj.fields.is_empty() && !obj.has_unknown { + "{}".to_string() + } else if obj.has_unknown { + "object".to_string() + } else { + let mut fields: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + fields.sort_unstable(); + format!("{{ {} }}", fields.join(", ")) + } + } + TyData::AttrsOf { value } => format!("object<{}>", self.display(value)), + TyData::Function(ref func) => { + let params: Vec<_> = func.params.iter().map(|p| p.name.as_str()).collect(); + format!("function({})", params.join(", ")) + } + TyData::Union(ref types) => { + let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect(); + parts.join(" | ") + } + TyData::Sum(ref types) => { + let parts: Vec<_> = types.iter().map(|&t| 
self.display(t)).collect(); + parts.join(" & ") + } + TyData::TypeVar { + id, + ref constraints, + } => { + let mut s = id.to_string(); + if !constraints.is_empty() { + let mut parts: Vec = Vec::new(); + if constraints.must_be_indexable { + parts.push("indexable".to_string()); + } + if constraints.must_support_fields { + parts.push("object-like".to_string()); + } + if constraints.must_be_callable { + parts.push("callable".to_string()); + } + if let Some(bound) = constraints.upper_bound { + parts.push(format!("<: {}", self.display(bound))); + } + if !parts.is_empty() { + s.push_str(" where "); + s.push_str(&parts.join(", ")); + } + } + s + } + } + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + #[test] + fn test_well_known_types() { + let store = TyStore::new(); + + // Check that well-known types have correct data + assert!(matches!(*store.get(Ty::ANY), TyData::Any)); + assert!(matches!(*store.get(Ty::NEVER), TyData::Never)); + assert!(matches!(*store.get(Ty::NULL), TyData::Null)); + assert!(matches!(*store.get(Ty::BOOL), TyData::Bool)); + assert!(matches!(*store.get(Ty::TRUE), TyData::True)); + assert!(matches!(*store.get(Ty::FALSE), TyData::False)); + assert!(matches!(*store.get(Ty::NUMBER), TyData::Number)); + assert!(matches!(*store.get(Ty::STRING), TyData::String)); + assert!(matches!(*store.get(Ty::CHAR), TyData::Char)); + } + + #[test] + fn test_global_ty_accepts_global() { + let global = GlobalTy::new(Ty::NUMBER); + assert_eq!(global, Some(GlobalTy::NUMBER)); + } + + #[test] + fn test_global_ty_rejects_local() { + let local = Ty::from_raw_local(123); + assert!(GlobalTy::new(local).is_none()); + assert_eq!(GlobalTy::try_from(local), Err(NotGlobalTy(local))); + } + + #[test] + fn test_intern_deduplication() { + let mut store = TyStore::new(); + + // Same type data should return same ID + let arr1 = store.array(Ty::NUMBER); + let arr2 = store.array(Ty::NUMBER); + assert_eq!(arr1, arr2); + + // Different element type 
should be different + let arr3 = store.array(Ty::STRING); + assert_ne!(arr1, arr3); + } + + #[test] + fn test_array_type() { + let mut store = TyStore::new(); + + let arr = store.array(Ty::NUMBER); + assert!( + matches!(*store.get(arr), TyData::Array { elem, is_set: false } if elem == Ty::NUMBER) + ); + } + + #[test] + fn test_array_set_type() { + let mut store = TyStore::new(); + + let arr_set = store.array_set(Ty::NUMBER); + assert!( + matches!(*store.get(arr_set), TyData::Array { elem, is_set: true } if elem == Ty::NUMBER) + ); + + // Sets and arrays are distinct types + let arr = store.array(Ty::NUMBER); + assert_ne!(arr, arr_set); + } + + #[test] + fn test_tuple_type() { + let mut store = TyStore::new(); + + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + assert_matches!(*store.get(tuple), TyData::Tuple { ref elems } => { + assert_eq!(elems, &vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + }); + } + + #[test] + fn test_union_simplification() { + let mut store = TyStore::new(); + + // Empty union is Never + let empty = store.union(vec![]); + assert_eq!(empty, Ty::NEVER); + + // Single element union is just the element + let single = store.union(vec![Ty::NUMBER]); + assert_eq!(single, Ty::NUMBER); + + // Union with Any is Any + let with_any = store.union(vec![Ty::NUMBER, Ty::ANY, Ty::STRING]); + assert_eq!(with_any, Ty::ANY); + + // Union without Never removes it + let with_never = store.union(vec![Ty::NUMBER, Ty::NEVER, Ty::STRING]); + if let TyData::Union(ref types) = *store.get(with_never) { + assert!(!types.contains(&Ty::NEVER)); + } + } + + #[test] + fn test_union_flattening() { + let mut store = TyStore::new(); + + // Create nested union + let inner = store.union(vec![Ty::NUMBER, Ty::STRING]); + let outer = store.union(vec![inner, Ty::BOOL]); + + // Should be flattened - union types are sorted for determinism + assert_matches!(*store.get(outer), TyData::Union(ref types) => { + let mut expected = vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]; + 
expected.sort(); + let mut actual = types.clone(); + actual.sort(); + assert_eq!(actual, expected); + }); + } + + #[test] + fn test_sum_simplification() { + let mut store = TyStore::new(); + + // Empty sum is Any + let empty = store.sum(vec![]); + assert_eq!(empty, Ty::ANY); + + // Sum with Never is Never + let with_never = store.sum(vec![Ty::NUMBER, Ty::NEVER, Ty::STRING]); + assert_eq!(with_never, Ty::NEVER); + + // Sum with Any removes it (Any is identity) + let with_any = store.sum(vec![Ty::NUMBER, Ty::ANY]); + assert_eq!(with_any, Ty::NUMBER); + } + + #[test] + fn test_is_indexable() { + let mut store = TyStore::new(); + + assert!(store.is_indexable(Ty::ANY)); + assert!(store.is_indexable(Ty::STRING)); + assert!(store.is_indexable(Ty::CHAR)); + + let arr = store.array(Ty::NUMBER); + assert!(store.is_indexable(arr)); + + let obj = store.object_any(); + assert!(store.is_indexable(obj)); + + assert!(!store.is_indexable(Ty::NUMBER)); + assert!(!store.is_indexable(Ty::BOOL)); + } + + #[test] + fn test_supports_field_access() { + let mut store = TyStore::new(); + + assert!(store.supports_field_access(Ty::ANY)); + + let obj = store.object_any(); + assert!(store.supports_field_access(obj)); + + let attrs = store.attrs_of(Ty::NUMBER); + assert!(store.supports_field_access(attrs)); + + assert!(!store.supports_field_access(Ty::NUMBER)); + assert!(!store.supports_field_access(Ty::STRING)); + } + + #[test] + fn test_is_callable() { + let mut store = TyStore::new(); + + assert!(store.is_callable(Ty::ANY)); + + let func = store.function_simple(vec!["x"], Ty::NUMBER); + assert!(store.is_callable(func)); + + assert!(!store.is_callable(Ty::NUMBER)); + assert!(!store.is_callable(Ty::STRING)); + } + + #[test] + fn test_display() { + let mut store = TyStore::new(); + + assert_eq!(store.display(Ty::ANY), "any"); + assert_eq!(store.display(Ty::NEVER), "never"); + assert_eq!(store.display(Ty::NULL), "null"); + assert_eq!(store.display(Ty::BOOL), "boolean"); + 
assert_eq!(store.display(Ty::NUMBER), "number"); + assert_eq!(store.display(Ty::STRING), "string"); + + let arr = store.array(Ty::NUMBER); + assert_eq!(store.display(arr), "array"); + + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.display(union), "number | string"); + } + + #[test] + fn test_ty_copy() { + // Ty should be Copy + let ty = Ty::NUMBER; + let ty2 = ty; // Copy, not move + assert_eq!(ty, ty2); + } + + #[test] + fn test_ty_size() { + // Ty should be 4 bytes + assert_eq!(std::mem::size_of::(), 4); + } + + #[test] + fn test_type_var() { + let mut store = TyStore::new(); + + let var = store.fresh_var(); + assert!(matches!(*store.get(var), TyData::TypeVar { .. })); + } + + #[test] + fn test_bounded_number() { + let mut store = TyStore::new(); + + let bounded = store.bounded_number(NumBounds::non_negative()); + assert_eq!(store.display(bounded), "number[0..]"); + } + + #[test] + fn test_literal_string() { + let mut store = TyStore::new(); + + let lit = store.literal_string("hello".to_string()); + assert_eq!(store.display(lit), "\"hello\""); + } + + #[test] + fn test_object_with_fields() { + let mut store = TyStore::new(); + + let obj = store.object(ObjectData { + fields: vec![ + ( + "name".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + ), + ( + "age".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + ), + ], + has_unknown: false, + }); + + assert_matches!(*store.get(obj), TyData::Object(ref data) => { + assert_eq!( + data.fields, + vec![ + ( + "name".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + ), + ( + "age".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + ), + ] + ); + }); + } + + #[test] + fn test_function_type() { + let mut store = TyStore::new(); + + let func = 
store.function(FunctionData { + params: vec![ + ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }, + ParamInterned { + name: "y".to_string(), + ty: Ty::STRING, + has_default: true, + }, + ], + return_spec: ReturnSpec::Fixed(Ty::BOOL), + variadic: false, + }); + + assert_matches!(*store.get(func), TyData::Function(ref data) => { + assert_eq!(data.required_count(), 1); + assert_eq!(data.total_count(), 2); + }); + } + + use rstest::rstest; + + #[rstest] + #[case::any_to_number(Ty::ANY, Ty::NUMBER, Ty::NUMBER)] + #[case::number_to_number(Ty::NUMBER, Ty::NUMBER, Ty::NUMBER)] + #[case::number_to_string_never(Ty::NUMBER, Ty::STRING, Ty::NEVER)] + #[case::never_stays_never(Ty::NEVER, Ty::NUMBER, Ty::NEVER)] + #[case::constraint_never_is_never(Ty::NUMBER, Ty::NEVER, Ty::NEVER)] + #[case::bool_to_true(Ty::BOOL, Ty::TRUE, Ty::TRUE)] + #[case::bool_to_false(Ty::BOOL, Ty::FALSE, Ty::FALSE)] + fn test_narrow(#[case] ty: Ty, #[case] constraint: Ty, #[case] expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.narrow(ty, constraint), expected); + } + + #[test] + fn test_narrow_union() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.narrow(union, Ty::NUMBER), Ty::NUMBER); + } + + #[test] + fn test_narrow_number_with_bounded() { + let mut store = TyStore::new(); + let bounded = store.bounded_number(NumBounds::non_negative()); + let result = store.narrow(Ty::NUMBER, bounded); + assert_eq!(result, bounded); + } + + #[test] + fn test_narrow_bounded_with_number() { + let mut store = TyStore::new(); + let bounded = store.bounded_number(NumBounds::non_negative()); + let result = store.narrow(bounded, Ty::NUMBER); + assert_eq!(result, bounded); + } + + #[test] + fn test_narrow_bounded_intersect() { + let mut store = TyStore::new(); + // [0, inf) intersected with [-inf, 10] = [0, 10] + let non_neg = store.bounded_number(NumBounds::non_negative()); + let at_most_10 = 
store.bounded_number(NumBounds { + min: None, + max: Some(10.0_f64.to_bits()), + }); + let result = store.narrow(non_neg, at_most_10); + let expected = store.bounded_number(NumBounds::between(0.0, 10.0)); + assert_eq!(result, expected); + } + + #[test] + fn test_narrow_bounded_empty_intersection() { + let mut store = TyStore::new(); + // [10, inf) intersected with [-inf, 5] = empty + let at_least_10 = store.bounded_number(NumBounds::at_least(10.0)); + let at_most_5 = store.bounded_number(NumBounds { + min: None, + max: Some(5.0_f64.to_bits()), + }); + let result = store.narrow(at_least_10, at_most_5); + assert_eq!(result, Ty::NEVER); + } + + #[test] + fn test_num_bounds_intersect_both_bounded() { + let b1 = NumBounds::between(0.0, 100.0); + let b2 = NumBounds::between(50.0, 200.0); + let result = b1.intersect(&b2).expect("should intersect"); + assert_eq!(result.min_f64(), Some(50.0)); + assert_eq!(result.max_f64(), Some(100.0)); + } + + #[test] + fn test_num_bounds_intersect_empty() { + let b1 = NumBounds::between(0.0, 10.0); + let b2 = NumBounds::between(20.0, 30.0); + assert!(b1.intersect(&b2).is_none()); + } + + #[test] + fn test_num_bounds_intersect_one_unbounded() { + let bounded = NumBounds::between(5.0, 15.0); + let unbounded = NumBounds::unbounded(); + let result = bounded.intersect(&unbounded).expect("should intersect"); + assert_eq!(result.min_f64(), Some(5.0)); + assert_eq!(result.max_f64(), Some(15.0)); + } + + #[rstest] + #[case::same_type_is_never(Ty::NUMBER, Ty::NUMBER, Ty::NEVER)] + #[case::different_type_unchanged(Ty::NUMBER, Ty::STRING, Ty::NUMBER)] + #[case::any_stays_any(Ty::ANY, Ty::NUMBER, Ty::ANY)] + #[case::never_stays_never(Ty::NEVER, Ty::NUMBER, Ty::NEVER)] + #[case::remove_never_unchanged(Ty::NUMBER, Ty::NEVER, Ty::NUMBER)] + fn test_widen(#[case] base: Ty, #[case] remove: Ty, #[case] expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.widen(base, remove), expected); + } + + #[test] + fn test_widen_union_removes_type() { 
+ let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.widen(union, Ty::NUMBER), Ty::STRING); + } + + #[test] + fn test_with_len_array_to_tuple() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + let result = store.with_len(arr, 3); + let expected = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + assert_eq!(result, expected); + } + + #[rstest] + #[case::string_len_1_to_char(Ty::STRING, 1, Ty::CHAR)] + #[case::char_len_1_stays(Ty::CHAR, 1, Ty::CHAR)] + #[case::char_len_0_never(Ty::CHAR, 0, Ty::NEVER)] + #[case::char_len_2_never(Ty::CHAR, 2, Ty::NEVER)] + fn test_with_len(#[case] ty: Ty, #[case] len: usize, #[case] expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.with_len(ty, len), expected); + } + + #[test] + fn test_with_len_function_uses_required_arity() { + let mut store = TyStore::new(); + let func = store.function(FunctionData { + params: vec![ + ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }, + ParamInterned { + name: "y".to_string(), + ty: Ty::ANY, + has_default: true, + }, + ], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + }); + assert_eq!(store.with_len(func, 1), func); + assert_eq!(store.with_len(func, 2), Ty::NEVER); + } + + #[test] + fn test_with_len_function_any_narrows_to_exact_arity() { + let mut store = TyStore::new(); + let func_any = store.function_any(); + let narrowed = store.with_len(func_any, 2); + let TyData::Function(func) = store.get(narrowed).clone() else { + panic!("expected function"); + }; + assert!(!func.variadic); + assert_eq!( + func.params + .iter() + .map(|p| p.name.as_str()) + .collect::>(), + vec!["arg0", "arg1"] + ); + assert!(func.params.iter().all(|p| p.ty == Ty::ANY)); + assert!(func.params.iter().all(|p| !p.has_default)); + assert_eq!(func.return_spec, ReturnSpec::Fixed(Ty::ANY)); + } + + mod test_is_subtype_of { + use super::*; + + #[rstest] + 
#[case::same_type(Ty::NUMBER, Ty::NUMBER, true)] + #[case::never_to_any(Ty::NEVER, Ty::ANY, true)] + #[case::never_to_number(Ty::NEVER, Ty::NUMBER, true)] + #[case::any_to_any(Ty::ANY, Ty::ANY, true)] + #[case::number_to_any(Ty::NUMBER, Ty::ANY, true)] + #[case::any_to_number(Ty::ANY, Ty::NUMBER, false)] + #[case::char_to_string(Ty::CHAR, Ty::STRING, true)] + #[case::string_to_char(Ty::STRING, Ty::CHAR, false)] + #[case::true_to_bool(Ty::TRUE, Ty::BOOL, true)] + #[case::false_to_bool(Ty::FALSE, Ty::BOOL, true)] + #[case::bool_to_true(Ty::BOOL, Ty::TRUE, false)] + #[case::number_to_string(Ty::NUMBER, Ty::STRING, false)] + fn test_well_known(#[case] sub: Ty, #[case] sup: Ty, #[case] expected: bool) { + let store = TyStore::new(); + assert_eq!(store.is_subtype_of(sub, sup), expected); + } + + #[test] + fn test_array_covariance() { + let mut store = TyStore::new(); + let arr_char = store.array(Ty::CHAR); + let arr_string = store.array(Ty::STRING); + // Array <: Array because Char <: String + assert!(store.is_subtype_of(arr_char, arr_string)); + // Array NOT <: Array + assert!(!store.is_subtype_of(arr_string, arr_char)); + } + + #[test] + fn test_tuple_to_array() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::CHAR, Ty::CHAR]); + let arr_string = store.array(Ty::STRING); + // [Char, Char] <: Array because Char <: String + assert!(store.is_subtype_of(tuple, arr_string)); + } + + #[test] + fn test_union_subtype() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::TRUE, Ty::FALSE]); + // (True | False) <: Bool + assert!(store.is_subtype_of(union, Ty::BOOL)); + } + + #[test] + fn test_subtype_of_union() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // Number <: (Number | String) + assert!(store.is_subtype_of(Ty::NUMBER, union)); + } + } + + mod test_constraints_satisfied_by { + use super::*; + + #[test] + fn test_no_constraints() { + let store = TyStore::new(); + let constraints = 
TyConstraints::none(); + assert!(constraints.satisfied_by(Ty::NUMBER, &store)); + assert!(constraints.satisfied_by(Ty::STRING, &store)); + assert!(constraints.satisfied_by(Ty::ANY, &store)); + } + + #[test] + fn test_must_be_indexable() { + let mut store = TyStore::new(); + let constraints = TyConstraints { + must_be_indexable: true, + ..Default::default() + }; + // Arrays are indexable + let arr = store.array(Ty::NUMBER); + assert!(constraints.satisfied_by(arr, &store)); + // Strings are indexable + assert!(constraints.satisfied_by(Ty::STRING, &store)); + // Numbers are not indexable + assert!(!constraints.satisfied_by(Ty::NUMBER, &store)); + } + + #[test] + fn test_upper_bound() { + let store = TyStore::new(); + let constraints = TyConstraints { + upper_bound: Some(Ty::STRING), + ..Default::default() + }; + // Char <: String + assert!(constraints.satisfied_by(Ty::CHAR, &store)); + // String <: String + assert!(constraints.satisfied_by(Ty::STRING, &store)); + // Number NOT <: String + assert!(!constraints.satisfied_by(Ty::NUMBER, &store)); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/store/impl_core.rs b/crates/jrsonnet-lsp-types/src/store/impl_core.rs new file mode 100644 index 00000000..e7e49da0 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/impl_core.rs @@ -0,0 +1,255 @@ +use super::*; + +impl TyStore { + /// Create a new type store with well-known types pre-populated. + #[must_use] + pub fn new() -> Self { + let mut store = Self { + data: Vec::with_capacity(64), + dedup: FxHashMap::default(), + }; + store.init_builtins(); + store + } + + /// Initialize built-in well-known types. + fn init_builtins(&mut self) { + // Must match the order of Ty constants! 
+ let builtins = [ + TyData::Any, // 0 = ANY + TyData::Never, // 1 = NEVER + TyData::Null, // 2 = NULL + TyData::Bool, // 3 = BOOL + TyData::True, // 4 = TRUE + TyData::False, // 5 = FALSE + TyData::Number, // 6 = NUMBER + TyData::String, // 7 = STRING + TyData::Char, // 8 = CHAR + // Padding to RESERVED_COUNT + TyData::Any, // 9 - reserved + TyData::Any, // 10 - reserved + TyData::Any, // 11 - reserved + TyData::Any, // 12 - reserved + TyData::Any, // 13 - reserved + TyData::Any, // 14 - reserved + TyData::Any, // 15 - reserved + ]; + + for (i, data) in builtins.into_iter().enumerate() { + let Some(raw_id) = to_u32(i) else { + return; + }; + let ty = Ty::from_raw(raw_id); + self.data.push(data.clone()); + // Only dedup the non-padding entries + if i < 9 { + self.dedup.insert(data, ty); + } + } + + debug_assert_eq!(self.data.len(), Ty::RESERVED_COUNT as usize); + } + + /// Intern a type, returning existing ID if already present. + /// + pub fn intern(&mut self, data: TyData) -> Ty { + // Fast path for well-known types + if let Some(ty) = Ty::well_known_for_data(&data) { + return ty; + } + + // Check if already interned + if let Some(&existing) = self.dedup.get(&data) { + return existing; + } + + // Intern new type + let Some(raw_id) = to_u32(self.data.len()) else { + return Ty::ANY; + }; + let id = Ty::from_raw(raw_id); + self.data.push(data.clone()); + self.dedup.insert(data, id); + id + } + + /// Get a reference to type data with display capability. + /// + /// Returns a `TyRef` that derefs to `&TyData` and implements `Display`. + /// Use `*store.get(ty)` to pattern match on the underlying `TyData`. + #[inline] + #[must_use] + pub fn get(&self, ty: Ty) -> TyRef<'_> { + TyRef { store: self, ty } + } + + /// Get the number of interned types. + #[must_use] + pub fn len(&self) -> usize { + self.data.len() + } + + /// Check if empty (never true after init). + #[must_use] + pub fn is_empty(&self) -> bool { + self.data.is_empty() + } + + /// Create an array type. 
+ pub fn array(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { + elem, + is_set: false, + }) + } + + /// Create a set type (array with sorted, unique elements). + pub fn array_set(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { elem, is_set: true }) + } + + /// Create a tuple type. + pub fn tuple(&mut self, elems: Vec) -> Ty { + if elems.is_empty() { + // Empty tuple is a closed empty array + return self.intern(TyData::Tuple { elems: vec![] }); + } + self.intern(TyData::Tuple { elems }) + } + + /// Create an object type. + pub fn object(&mut self, data: ObjectData) -> Ty { + self.intern(TyData::Object(data)) + } + + /// Create an open object (unknown fields). + pub fn object_any(&mut self) -> Ty { + self.object(ObjectData::open()) + } + + /// Create a generic function type (accepts any args, returns any). + pub fn function_any(&mut self) -> Ty { + self.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }) + } + + /// Create an `AttrsOf` type (object with uniform value type). + pub fn attrs_of(&mut self, value: Ty) -> Ty { + self.intern(TyData::AttrsOf { value }) + } + + /// Create a function type. + pub fn function(&mut self, data: FunctionData) -> Ty { + self.intern(TyData::Function(data)) + } + + /// Create a function with simple params and fixed return. + pub fn function_simple(&mut self, param_names: Vec<&str>, return_ty: Ty) -> Ty { + let params = param_names + .into_iter() + .map(|name| ParamInterned { + name: name.to_string(), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + self.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }) + } + + /// Create a union type. 
+ pub fn union(&mut self, mut types: Vec) -> Ty { + // Simplification rules + match types.as_slice() { + [] => return Ty::NEVER, + [only] => return *only, + _ => {} + } + + // Flatten nested unions and remove duplicates + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::ANY { + return Ty::ANY; // Any absorbs everything + } + if ty == Ty::NEVER { + continue; // Never is identity for union + } + if let TyData::Union(ref inner) = *self.get(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + // Sort for canonical form + flattened.sort_by_key(|t| t.0); + flattened.dedup(); + + match flattened.as_slice() { + [] => Ty::NEVER, + [only] => *only, + _ => self.intern(TyData::Union(flattened)), + } + } + + /// Create a sum (intersection) type. + pub fn sum(&mut self, mut types: Vec) -> Ty { + match types.as_slice() { + [] => return Ty::ANY, + [only] => return *only, + _ => {} + } + + // Flatten and simplify + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::NEVER { + return Ty::NEVER; // Never absorbs everything in intersection + } + if ty == Ty::ANY { + continue; // Any is identity for intersection + } + if let TyData::Sum(ref inner) = *self.get(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + flattened.sort_by_key(|t| t.0); + flattened.dedup(); + + match flattened.as_slice() { + [] => Ty::ANY, + [only] => *only, + _ => self.intern(TyData::Sum(flattened)), + } + } + + /// Create a bounded number type. + pub fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + self.intern(TyData::BoundedNumber(bounds)) + } + + /// Create a literal string type. + pub fn literal_string(&mut self, s: String) -> Ty { + self.intern(TyData::LiteralString(s)) + } + + /// Create a type variable. 
+ pub fn type_var(&mut self, id: TyVarId, constraints: TyConstraints) -> Ty { + self.intern(TyData::TypeVar { id, constraints }) + } + + /// Create a fresh type variable with no constraints. + pub fn fresh_var(&mut self) -> Ty { + self.type_var(TyVarId::fresh(), TyConstraints::none()) + } +} diff --git a/crates/jrsonnet-lsp-types/src/store/impl_transform.rs b/crates/jrsonnet-lsp-types/src/store/impl_transform.rs new file mode 100644 index 00000000..a0bb9fc1 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/impl_transform.rs @@ -0,0 +1,107 @@ +use super::*; + +impl TyStore { + /// Import a type from another store into this store. + /// + /// Well-known constants (ANY, NEVER, NULL, BOOL, TRUE, FALSE, NUMBER, STRING, CHAR) + /// are returned as-is since they have the same value across all stores. + /// Complex types are recursively imported and re-interned. + pub fn import_from(&mut self, ty: Ty, source: &S) -> Ty { + // Well-known constants are the same in all stores + if ty.is_well_known() { + return ty; + } + + // Complex types need re-interning + match source.get_data(ty) { + TyData::Array { elem, is_set } => { + let imported_elem = self.import_from(elem, source); + if is_set { + self.array_set(imported_elem) + } else { + self.array(imported_elem) + } + } + TyData::Tuple { elems } => { + let imported_elems: Vec<_> = + elems.iter().map(|&e| self.import_from(e, source)).collect(); + self.tuple(imported_elems) + } + TyData::Union(variants) => { + let imported_variants: Vec<_> = variants + .iter() + .map(|&v| self.import_from(v, source)) + .collect(); + self.union(imported_variants) + } + TyData::Object(obj) => { + let imported_fields: Vec<_> = obj + .fields + .iter() + .map(|(name, def)| { + ( + name.clone(), + FieldDefInterned { + ty: self.import_from(def.ty, source), + required: def.required, + visibility: def.visibility, + }, + ) + }) + .collect(); + self.object(ObjectData { + fields: imported_fields, + has_unknown: obj.has_unknown, + }) + } + 
TyData::Function(func) => { + let imported_params: Vec<_> = func + .params + .iter() + .map(|p| ParamInterned { + name: p.name.clone(), + ty: self.import_from(p.ty, source), + has_default: p.has_default, + }) + .collect(); + let imported_return_spec = match &func.return_spec { + ReturnSpec::Fixed(ret_ty) => { + ReturnSpec::Fixed(self.import_from(*ret_ty, source)) + } + other => other.clone(), + }; + self.intern(TyData::Function(FunctionData { + params: imported_params, + return_spec: imported_return_spec, + variadic: func.variadic, + })) + } + TyData::AttrsOf { value } => { + let imported_value = self.import_from(value, source); + self.intern(TyData::AttrsOf { + value: imported_value, + }) + } + TyData::BoundedNumber(bounds) => self.intern(TyData::BoundedNumber(bounds)), + TyData::LiteralString(s) => self.intern(TyData::LiteralString(s)), + TyData::TypeVar { id, constraints } => self.intern(TyData::TypeVar { id, constraints }), + TyData::Sum(variants) => { + let imported_variants: Vec<_> = variants + .iter() + .map(|&v| self.import_from(v, source)) + .collect(); + self.intern(TyData::Sum(imported_variants)) + } + // Primitives should have been caught by is_well_known(), but handle anyway + TyData::Any => Ty::ANY, + TyData::Never => Ty::NEVER, + TyData::Null => Ty::NULL, + TyData::Bool => Ty::BOOL, + TyData::True => Ty::TRUE, + TyData::False => Ty::FALSE, + TyData::Number => Ty::NUMBER, + TyData::String => Ty::STRING, + TyData::Char => Ty::CHAR, + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/store/ops_impl.rs b/crates/jrsonnet-lsp-types/src/store/ops_impl.rs new file mode 100644 index 00000000..310bbcf2 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/ops_impl.rs @@ -0,0 +1,57 @@ +use super::*; + +impl TypeStoreOps for TyStore { + fn get_data(&self, ty: Ty) -> TyData { + self.get(ty).clone() + } + + fn display(&self, ty: Ty) -> String { + TyStore::display(self, ty) + } + + fn array(&mut self, elem: Ty) -> Ty { + TyStore::array(self, elem) + } + + fn 
array_set(&mut self, elem: Ty) -> Ty { + TyStore::array_set(self, elem) + } + + fn tuple(&mut self, elems: Vec<Ty>) -> Ty { + TyStore::tuple(self, elems) + } + + fn object(&mut self, data: ObjectData) -> Ty { + TyStore::object(self, data) + } + + fn attrs_of(&mut self, value: Ty) -> Ty { + TyStore::attrs_of(self, value) + } + + fn function(&mut self, data: FunctionData) -> Ty { + TyStore::function(self, data) + } + + fn union(&mut self, types: Vec<Ty>) -> Ty { + TyStore::union(self, types) + } + + fn sum(&mut self, types: Vec<Ty>) -> Ty { + TyStore::sum(self, types) + } + + fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + TyStore::bounded_number(self, bounds) + } + + fn literal_string(&mut self, s: String) -> Ty { + TyStore::literal_string(self, s) + } +} + +impl Default for TyStore { + fn default() -> Self { + Self::new() + } +} diff --git a/crates/jrsonnet-lsp-types/src/store/thread_local_store.rs b/crates/jrsonnet-lsp-types/src/store/thread_local_store.rs new file mode 100644 index 00000000..612e345c --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/thread_local_store.rs @@ -0,0 +1,18 @@ +use std::cell::RefCell; + +use super::*; + +// Thread-local store for convenient access during analysis +thread_local! { + static STORE: RefCell<TyStore> = RefCell::new(TyStore::new()); +} + +/// Execute a function with access to the thread-local type store. +pub fn with_store<R>(f: impl FnOnce(&mut TyStore) -> R) -> R { + STORE.with(|s| f(&mut s.borrow_mut())) +} + +/// Reset the thread-local store (useful for tests). +pub fn reset_store() { + STORE.with(|s| *s.borrow_mut() = TyStore::new()); +} diff --git a/crates/jrsonnet-lsp-types/src/subst.rs b/crates/jrsonnet-lsp-types/src/subst.rs new file mode 100644 index 00000000..f00d5e53 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/subst.rs @@ -0,0 +1,27 @@ +//! Type substitution for merging local types into the global store. +//! +//! After file analysis, local types (created in `LocalTyStore`) need to be +//!
merged into the global store. This module provides the substitution +//! mechanism that: +//! +//! 1. Topologically sorts local types by dependencies +//! 2. Re-interns each type into the global store +//! 3. Builds a mapping from local Ty → global Ty +//! 4. Allows applying this mapping to update references + +use rustc_hash::FxHashMap; + +use crate::store::Ty; + +/// Substitution mapping from local types to global types. +/// +/// After analysis, this is used to rewrite local `Ty` references +/// to their corresponding global `Ty` values. +#[derive(Debug, Clone, Default)] +pub struct TySubst { + /// Mapping from local Ty → global Ty. + mapping: FxHashMap<Ty, Ty>, +} + +mod core; +mod merge; diff --git a/crates/jrsonnet-lsp-types/src/subst/core.rs b/crates/jrsonnet-lsp-types/src/subst/core.rs new file mode 100644 index 00000000..ad665eb3 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/subst/core.rs @@ -0,0 +1,39 @@ +use super::*; + +impl TySubst { + /// Create an empty substitution. + #[must_use] + pub fn new() -> Self { + Self::default() + } + /// Apply the substitution to a Ty. + /// + /// - Global types are returned unchanged + /// - Local types are looked up in the mapping + /// - Unknown local types return the original (shouldn't happen after merge) + #[must_use] + pub fn apply(&self, ty: Ty) -> Ty { + if ty.is_global() { + return ty; + } + self.mapping.get(&ty).copied().unwrap_or(ty) + } + + /// Check if this substitution contains a mapping for a type. + #[must_use] + pub fn contains(&self, ty: Ty) -> bool { + self.mapping.contains_key(&ty) + } + + /// Get the number of mappings. + #[must_use] + pub fn len(&self) -> usize { + self.mapping.len() + } + + /// Check if empty.
+ #[must_use] + pub fn is_empty(&self) -> bool { + self.mapping.is_empty() + } +} diff --git a/crates/jrsonnet-lsp-types/src/subst/merge.rs b/crates/jrsonnet-lsp-types/src/subst/merge.rs new file mode 100644 index 00000000..f543470b --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/subst/merge.rs @@ -0,0 +1,545 @@ +use rustc_hash::FxHashMap; + +use super::*; +use crate::{ + global_store::GlobalTyStore, + local_store::LocalTyStore, + store::{ + FieldDefInterned, FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, + TyData, + }, +}; + +impl TySubst { + /// Merge local types into the global store, building the substitution map. + /// + /// This is the main entry point. It: + /// 1. Topologically sorts local types by dependency order + /// 2. Interns each into the global store (applying current substitutions) + /// 3. Records the local→global mapping + /// + /// # Arguments + /// - `global`: The global store to merge into (mutably borrowed) + /// - `local`: The local store to merge from + /// + /// # Returns + /// A substitution that can be used to rewrite local Ty references. + pub fn merge(global: &GlobalTyStore, local: &LocalTyStore) -> Self { + let mut subst = Self::new(); + + if local.is_empty() { + return subst; + } + + // Get topological ordering of local types + let order = Self::topological_sort(local); + + // Process each local type in dependency order + for local_ty in order { + let local_data = local.get_data(local_ty); + + // Apply current substitution to the type data + let substituted_data = subst.apply_to_data(local_data); + + // Intern into global store (handles deduplication) + let global_ty = global.intern(substituted_data); + + // Record the mapping + subst.mapping.insert(local_ty, global_ty); + } + + subst + } + /// Apply the substitution to `TyData`, rewriting all Ty references. 
+ fn apply_to_data(&self, data: &TyData) -> TyData { + match data { + // Primitives have no Ty references + TyData::Any => TyData::Any, + TyData::Never => TyData::Never, + TyData::Null => TyData::Null, + TyData::Bool => TyData::Bool, + TyData::True => TyData::True, + TyData::False => TyData::False, + TyData::Number => TyData::Number, + TyData::String => TyData::String, + TyData::Char => TyData::Char, + TyData::BoundedNumber(bounds) => TyData::BoundedNumber(*bounds), + TyData::LiteralString(s) => TyData::LiteralString(s.clone()), + + // Compound types - recursively apply substitution + TyData::Array { elem, is_set } => TyData::Array { + elem: self.apply_for_merge(*elem), + is_set: *is_set, + }, + + TyData::Tuple { elems } => TyData::Tuple { + elems: elems.iter().map(|&e| self.apply_for_merge(e)).collect(), + }, + + TyData::Union(variants) => { + TyData::Union(variants.iter().map(|&v| self.apply_for_merge(v)).collect()) + } + + TyData::Sum(variants) => { + TyData::Sum(variants.iter().map(|&v| self.apply_for_merge(v)).collect()) + } + + TyData::Object(obj) => TyData::Object(ObjectData { + fields: obj + .fields + .iter() + .map(|(name, def)| { + ( + name.clone(), + FieldDefInterned { + ty: self.apply_for_merge(def.ty), + required: def.required, + visibility: def.visibility, + }, + ) + }) + .collect(), + has_unknown: obj.has_unknown, + }), + + TyData::AttrsOf { value } => TyData::AttrsOf { + value: self.apply_for_merge(*value), + }, + + TyData::Function(func) => TyData::Function(FunctionData { + params: func + .params + .iter() + .map(|p| ParamInterned { + name: p.name.clone(), + ty: self.apply_for_merge(p.ty), + has_default: p.has_default, + }) + .collect(), + return_spec: match &func.return_spec { + ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply_for_merge(*ret)), + other => other.clone(), + }, + variadic: func.variadic, + }), + + TyData::TypeVar { id, constraints } => TyData::TypeVar { + id: *id, + constraints: TyConstraints { + must_be_indexable: 
constraints.must_be_indexable, + must_support_fields: constraints.must_support_fields, + must_be_callable: constraints.must_be_callable, + upper_bound: constraints.upper_bound.map(|b| self.apply_for_merge(b)), + }, + }, + } + } + + /// Apply substitution during merge. + /// + /// Any unresolved local reference is converted to `any` instead of leaking a + /// local `Ty` into the global store. + fn apply_for_merge(&self, ty: Ty) -> Ty { + if ty.is_global() { + return ty; + } + self.mapping.get(&ty).copied().unwrap_or(Ty::ANY) + } + + /// Topologically sort local types by dependency order. + /// + /// Types that don't depend on other local types come first. + /// This ensures that when we process a type, all its dependencies + /// have already been mapped to global types. + fn topological_sort(local: &LocalTyStore) -> Vec<Ty> { + let types: Vec<_> = local.iter().collect(); + let n = types.len(); + + if n == 0 { + return vec![]; + } + + // Build adjacency list: edges[i] = types that type i depends on + let mut in_degree: FxHashMap<Ty, usize> = FxHashMap::default(); + let mut dependents: FxHashMap<Ty, Vec<Ty>> = FxHashMap::default(); + + for &(ty, _) in &types { + in_degree.insert(ty, 0); + dependents.insert(ty, Vec::new()); + } + + // Count dependencies (only local ones matter) + for &(ty, data) in &types { + let deps = Self::get_local_dependencies(data); + let Some(in_degree_entry) = in_degree.get_mut(&ty) else { + continue; + }; + *in_degree_entry = deps.len(); + for dep in deps { + if let Some(dep_list) = dependents.get_mut(&dep) { + dep_list.push(ty); + } + } + } + + // Kahn's algorithm for topological sort + let mut result = Vec::with_capacity(n); + let mut queue: Vec<Ty> = in_degree + .iter() + .filter(|(_, &deg)| deg == 0) + .map(|(&ty, _)| ty) + .collect(); + + while let Some(ty) = queue.pop() { + result.push(ty); + + if let Some(deps) = dependents.get(&ty) { + for &dependent in deps { + if let Some(deg) = in_degree.get_mut(&dependent) { + *deg -= 1; + if *deg == 0 { + queue.push(dependent); +
} + } + } + } + } + + // If we didn't process all types, there's a cycle. + // This shouldn't happen with well-formed types, but handle gracefully. + if result.len() < n { + // Add remaining types in arbitrary order + for &(ty, _) in &types { + if !result.contains(&ty) { + result.push(ty); + } + } + } + + result + } + + /// Get local Ty references in a `TyData`. + fn get_local_dependencies(data: &TyData) -> Vec { + let mut deps = Vec::new(); + Self::collect_local_refs(data, &mut deps); + deps + } + + /// Recursively collect local Ty references. + fn collect_local_refs(data: &TyData, deps: &mut Vec) { + match data { + TyData::Array { elem, .. } => { + if elem.is_local() { + deps.push(*elem); + } + } + TyData::Tuple { elems } => { + for &e in elems { + if e.is_local() { + deps.push(e); + } + } + } + TyData::Union(variants) | TyData::Sum(variants) => { + for &v in variants { + if v.is_local() { + deps.push(v); + } + } + } + TyData::Object(obj) => { + for (_, def) in &obj.fields { + if def.ty.is_local() { + deps.push(def.ty); + } + } + } + TyData::AttrsOf { value } => { + if value.is_local() { + deps.push(*value); + } + } + TyData::Function(func) => { + for p in &func.params { + if p.ty.is_local() { + deps.push(p.ty); + } + } + if let ReturnSpec::Fixed(ret) = &func.return_spec { + if ret.is_local() { + deps.push(*ret); + } + } + } + TyData::TypeVar { constraints, .. 
} => { + if let Some(bound) = constraints.upper_bound { + if bound.is_local() { + deps.push(bound); + } + } + } + // Primitives have no references + _ => {} + } + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + #[test] + fn test_subst_empty_local() { + let global = GlobalTyStore::new(); + let local = LocalTyStore::new(); + + let subst = TySubst::merge(&global, &local); + assert!(subst.is_empty()); + } + + #[test] + fn test_subst_simple_type() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create a local array type + let arr = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + assert!(arr.is_local()); + + let subst = TySubst::merge(&global, &local); + assert_eq!(subst.len(), 1); + + // The mapped type should be global + let global_arr = subst.apply(arr); + assert!(global_arr.is_global()); + + // Verify the data is correct + let data = global.get_data(global_arr); + assert_matches!(data, TyData::Array { elem, .. } if elem == Ty::NUMBER); + } + + #[test] + fn test_subst_nested_types() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create nested local types: Array> + let inner = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let outer = local.intern(TyData::Array { + elem: inner, + is_set: false, + }); + + assert!(inner.is_local()); + assert!(outer.is_local()); + + let subst = TySubst::merge(&global, &local); + assert_eq!(subst.len(), 2); + + // Both should be mapped to global + let global_inner = subst.apply(inner); + let global_outer = subst.apply(outer); + assert!(global_inner.is_global()); + assert!(global_outer.is_global()); + + // Verify the outer type references the global inner + let outer_data = global.get_data(global_outer); + assert_matches!(outer_data, TyData::Array { elem, .. 
} if elem == global_inner); + } + + #[test] + fn test_subst_global_types_unchanged() { + let subst = TySubst::new(); + + // Global types should pass through unchanged + assert_eq!(subst.apply(Ty::NUMBER), Ty::NUMBER); + assert_eq!(subst.apply(Ty::STRING), Ty::STRING); + assert_eq!(subst.apply(Ty::ANY), Ty::ANY); + } + + #[test] + fn test_subst_deduplication() { + let global = GlobalTyStore::new(); + + // Pre-intern a type in global + let global_arr = global.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + // Create the same type locally + let mut local = LocalTyStore::new(); + let local_arr = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + let subst = TySubst::merge(&global, &local); + + // Should map to the existing global type + assert_eq!(subst.apply(local_arr), global_arr); + } + + #[test] + fn test_subst_union_type() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create a local union type + let union = local.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING])); + + let subst = TySubst::merge(&global, &local); + + let global_union = subst.apply(union); + assert!(global_union.is_global()); + + let data = global.get_data(global_union); + assert_matches!(data, TyData::Union(variants) if variants == vec![Ty::NUMBER, Ty::STRING]); + } + + #[test] + fn test_subst_object_type() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create a local object type + let obj = local.intern(TyData::Object(ObjectData { + fields: vec![( + "name".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: crate::store::FieldVis::Normal, + }, + )], + has_unknown: false, + })); + + let subst = TySubst::merge(&global, &local); + + let global_obj = subst.apply(obj); + assert!(global_obj.is_global()); + } + + #[test] + fn test_topological_sort_independent() { + let mut local = LocalTyStore::new(); + + // Create independent types (no dependencies 
between them) + let _arr1 = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let _arr2 = local.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + let order = TySubst::topological_sort(&local); + assert_eq!(order.len(), 2); + } + + #[test] + fn test_topological_sort_dependent() { + let mut local = LocalTyStore::new(); + + // Create dependent types + let inner = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let outer = local.intern(TyData::Array { + elem: inner, + is_set: false, + }); + + let order = TySubst::topological_sort(&local); + // inner should come before outer (inner has no deps, outer depends on inner) + assert_eq!(order, vec![inner, outer]); + } + + #[test] + fn test_merge_cycle_does_not_leave_local_refs_in_global() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Construct a direct cycle between local types: + // L0 = array, L1 = array + let local_0 = local.intern(TyData::Array { + elem: Ty::from_raw_local(1), + is_set: false, + }); + let local_1 = local.intern(TyData::Array { + elem: Ty::from_raw_local(0), + is_set: false, + }); + + let subst = TySubst::merge(&global, &local); + + let global_0 = subst.apply(local_0); + let global_1 = subst.apply(local_1); + assert!(global_0.is_global()); + assert!(global_1.is_global()); + + let data_0 = global.get_data(global_0); + let data_1 = global.get_data(global_1); + assert!( + !type_data_contains_local_refs(&data_0), + "first merged global type still contains local refs: {data_0:?}" + ); + assert!( + !type_data_contains_local_refs(&data_1), + "second merged global type still contains local refs: {data_1:?}" + ); + } + + fn type_data_contains_local_refs(data: &TyData) -> bool { + match data { + TyData::Array { elem, .. 
} => elem.is_local(), + TyData::Tuple { elems } => elems.iter().any(|ty| ty.is_local()), + TyData::Union(variants) | TyData::Sum(variants) => { + variants.iter().any(|ty| ty.is_local()) + } + TyData::Object(obj) => obj.fields.iter().any(|(_, field)| field.ty.is_local()), + TyData::AttrsOf { value } => value.is_local(), + TyData::Function(func) => { + func.params.iter().any(|param| param.ty.is_local()) + || match &func.return_spec { + ReturnSpec::Fixed(ty) => ty.is_local(), + ReturnSpec::SameAsArg(_) + | ReturnSpec::ArrayOfArg(_) + | ReturnSpec::ArrayWithSameElements(_) + | ReturnSpec::SetWithSameElements(_) + | ReturnSpec::FuncReturnType(_) + | ReturnSpec::ArrayOfFuncReturn(_) + | ReturnSpec::FlatMapResult(_) + | ReturnSpec::NonNegative + | ReturnSpec::ObjectValuesType(_) => false, + } + } + TyData::TypeVar { constraints, .. } => { + constraints.upper_bound.is_some_and(Ty::is_local) + } + TyData::Any + | TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::String + | TyData::Char + | TyData::BoundedNumber(_) + | TyData::LiteralString(_) => false, + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/unification.rs b/crates/jrsonnet-lsp-types/src/unification.rs new file mode 100644 index 00000000..17b11119 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/unification.rs @@ -0,0 +1,34 @@ +//! Type unification with structural subtyping and variance. +//! +//! This module implements type unification for Jsonnet's type system. +//! Unification checks whether a "got" type can be used where an "expected" +//! type is required, with proper handling of variance for function types. +//! +//! # Variance +//! +//! - **Covariant**: Most positions. `A <: B` means `A` can substitute for `B`. +//! More specific types can be used where more general types are expected. +//! Example: `Array` can be used where `Array` is expected. +//! +//! - **Contravariant**: Function parameters only. The relationship flips. +//! 
A function accepting `Any` can substitute for one expecting `Number`, +//! because it can handle all the inputs the original could. +//! +//! # Examples +//! +//! ```ignore +//! // Covariant: Number <: Any, so Array <: Array +//! unify(Array, Array, Covariant) // Ok +//! +//! // Contravariant for params: fn(Any) can substitute for fn(Number) +//! unify(fn(Any) -> String, fn(Number) -> String, Covariant) // Ok +//! +//! // But fn(Number) cannot substitute for fn(Any) +//! unify(fn(Number) -> String, fn(Any) -> String, Covariant) // Err +//! ``` + +mod algorithm; +mod types; + +pub use algorithm::{is_subtype_ty, types_equivalent_ty, unify_ty}; +pub use types::{PathElement, UnifyError, UnifyReason, UnifyResult, Variance}; diff --git a/crates/jrsonnet-lsp-types/src/unification/algorithm.rs b/crates/jrsonnet-lsp-types/src/unification/algorithm.rs new file mode 100644 index 00000000..288d9866 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/unification/algorithm.rs @@ -0,0 +1,678 @@ +use super::types::{PathElement, UnifyError, UnifyReason, UnifyResult, Variance}; +use crate::store::{FunctionData, ObjectData, ReturnSpec, Ty, TyData, TypeStoreOps}; + +pub fn unify_ty( + store: &S, + got: Ty, + expected: Ty, + variance: Variance, +) -> UnifyResult { + // Fast path: identical types always unify + if got == expected { + return Ok(()); + } + + // Fast paths for well-known types + if got == Ty::NEVER { + return Ok(()); // Never is bottom, unifies with anything + } + if expected == Ty::ANY { + return Ok(()); // Any accepts everything in covariant position + } + if got == Ty::ANY && variance == Variance::Contravariant { + return Ok(()); // Any in contravariant position + } + + // Delegate to the full implementation + unify_ty_impl(store, got, expected, variance) +} + +/// Internal unification implementation. 
+fn unify_ty_impl( + store: &S, + got: Ty, + expected: Ty, + variance: Variance, +) -> UnifyResult { + let got_data = store.get_data(got); + let expected_data = store.get_data(expected); + + // Handle top and bottom types + match (&got_data, &expected_data) { + // Never is bottom type - unifies with anything + // Any = Any regardless of variance + (TyData::Never, _) | (TyData::Any, TyData::Any) => return Ok(()), + + // When expected is Any (and got is not Any) + (_, TyData::Any) => { + return match variance { + Variance::Covariant => Ok(()), + Variance::Contravariant => Err(make_error(got, expected)), + }; + } + + // When got is Any (and expected is not Any) + (TyData::Any, _) => { + return match variance { + Variance::Contravariant => Ok(()), + Variance::Covariant => Err(make_error(got, expected)), + }; + } + + // Expecting Never means unreachable code - only Never satisfies it + (_, TyData::Never) => return Err(make_error(got, expected)), + + // Type variables unify with any type + (TyData::TypeVar { constraints, .. }, _) => { + if let Some(bound) = &constraints.upper_bound { + return unify_ty(store, *bound, expected, variance); + } + return Ok(()); + } + (_, TyData::TypeVar { constraints, .. 
}) => { + if let Some(bound) = &constraints.upper_bound { + return unify_ty(store, got, *bound, variance); + } + return Ok(()); + } + + _ => {} + } + + // Handle unions + if let TyData::Union(got_variants) = store.get_data(got) { + // All variants of got must unify with expected + for variant in got_variants { + unify_ty(store, variant, expected, variance)?; + } + return Ok(()); + } + + if let TyData::Union(expected_variants) = store.get_data(expected) { + // Got must unify with at least one expected variant + // Collect all errors for comprehensive reporting + let mut variant_errors = Vec::new(); + for variant in expected_variants { + match unify_ty(store, got, variant, variance) { + Ok(()) => return Ok(()), + Err(e) => variant_errors.push(e), + } + } + // None of the variants matched - report all errors + return Err(UnifyError::new( + got, + expected, + UnifyReason::UnionMismatch(variant_errors), + )); + } + + // Re-fetch data for the concrete type matching + let got_data = store.get_data(got); + let expected_data = store.get_data(expected); + + // Now handle concrete type pairs + match (&got_data, &expected_data) { + // Primitives must match exactly + (TyData::Null, TyData::Null) + | (TyData::Bool | TyData::True | TyData::False, TyData::Bool) + | (TyData::Number | TyData::BoundedNumber(_), TyData::Number) + | (TyData::String | TyData::Char | TyData::LiteralString(_), TyData::String) + | (TyData::Char, TyData::Char) + | (TyData::True, TyData::True) + | (TyData::False, TyData::False) => Ok(()), + (TyData::LiteralString(a), TyData::LiteralString(b)) if a == b => Ok(()), + + // BoundedNumber with narrower bounds is subtype of wider + (TyData::BoundedNumber(got_bounds), TyData::BoundedNumber(expected_bounds)) => { + if got_bounds.is_subset_of(expected_bounds) { + Ok(()) + } else { + Err(make_error(got, expected)) + } + } + + // Arrays are covariant in element type + ( + TyData::Array { elem: got_elem, .. }, + TyData::Array { + elem: expected_elem, + .. 
+ }, + ) => unify_ty(store, *got_elem, *expected_elem, variance) + .map_err(|e| e.with_path(PathElement::ArrayElement)), + + // Tuple to Array: all tuple elements must unify with array element + ( + TyData::Tuple { elems }, + TyData::Array { + elem: expected_elem, + .. + }, + ) => { + for (i, elem) in elems.iter().enumerate() { + unify_ty(store, *elem, *expected_elem, variance) + .map_err(|e| e.with_path(PathElement::TupleElement(i)))?; + } + Ok(()) + } + + // Tuples must have same length, elements unify positionally + ( + TyData::Tuple { elems: got_elems }, + TyData::Tuple { + elems: expected_elems, + }, + ) => { + if got_elems.len() != expected_elems.len() { + return Err(make_error(got, expected)); + } + for (i, (g, e)) in got_elems.iter().zip(expected_elems.iter()).enumerate() { + unify_ty(store, *g, *e, variance) + .map_err(|err| err.with_path(PathElement::TupleElement(i)))?; + } + Ok(()) + } + + // Objects use structural subtyping + (TyData::Object(got_obj), TyData::Object(expected_obj)) => { + unify_objects_ty(store, got_obj, expected_obj, variance, got, expected) + } + + // AttrsOf is covariant in T + ( + TyData::AttrsOf { value: got_value }, + TyData::AttrsOf { + value: expected_value, + }, + ) => unify_ty(store, *got_value, *expected_value, variance) + .map_err(|e| e.with_path(PathElement::Field("[*]".to_string()))), + + // Object with known fields can be used where AttrsOf is expected + ( + TyData::Object(got_obj), + TyData::AttrsOf { + value: expected_value, + }, + ) => { + for (field_name, field_def) in &got_obj.fields { + unify_ty(store, field_def.ty, *expected_value, variance) + .map_err(|e| e.with_path(PathElement::Field(field_name.clone())))?; + } + Ok(()) + } + + // AttrsOf can be used where open Object is expected + (TyData::AttrsOf { .. 
}, TyData::Object(expected_obj)) => { + if expected_obj.has_unknown && expected_obj.fields.is_empty() { + Ok(()) + } else { + Err(make_error(got, expected)) + } + } + + // Functions have contravariant parameters, covariant return + (TyData::Function(got_fn), TyData::Function(expected_fn)) => { + unify_functions_ty(store, got_fn, expected_fn, variance, got, expected) + } + + // All other combinations are mismatches + _ => Err(make_error(got, expected)), + } +} + +/// Unify two object types structurally. +fn unify_objects_ty( + store: &S, + got: &ObjectData, + expected: &ObjectData, + variance: Variance, + got_ty: Ty, + expected_ty: Ty, +) -> UnifyResult { + // Check that got has all required fields from expected + for (field_name, expected_field) in &expected.fields { + match got.fields.iter().find(|(n, _)| n == field_name) { + Some((_, got_field)) => { + // Field exists - unify the types + unify_ty(store, got_field.ty, expected_field.ty, variance) + .map_err(|e| e.with_path(PathElement::Field(field_name.clone())))?; + } + None => { + // Field missing - error if required and got is closed + if expected_field.required && !got.has_unknown { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::MissingField(field_name.clone()), + )); + } + } + } + } + + // If expected is closed, got cannot have extra fields + if !expected.has_unknown { + for (field_name, _) in &got.fields { + if !expected.fields.iter().any(|(n, _)| n == field_name) { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::ExtraField(field_name.clone()), + )); + } + } + } + + Ok(()) +} + +/// Unify two function types with proper variance handling. 
+fn unify_functions_ty( + store: &S, + got: &FunctionData, + expected: &FunctionData, + variance: Variance, + got_ty: Ty, + expected_ty: Ty, +) -> UnifyResult { + // Check parameter count compatibility + let got_required = got.params.iter().filter(|p| !p.has_default).count(); + let expected_required = expected.params.iter().filter(|p| !p.has_default).count(); + + if got_required > expected.params.len() || got.params.len() < expected_required { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::ParamCountMismatch { + got: got.params.len(), + expected: expected.params.len(), + }, + )); + } + + // Parameters are CONTRAVARIANT: swap argument order + for (i, (got_param, expected_param)) in + got.params.iter().zip(expected.params.iter()).enumerate() + { + let got_param_ty = got_param.ty; + let expected_param_ty = expected_param.ty; + + // Swap argument order for contravariant position + unify_ty(store, expected_param_ty, got_param_ty, variance) + .map_err(|e| e.with_path(PathElement::Parameter(i)))?; + } + + // Return type is COVARIANT + // Extract fixed return type or default to Any for dynamic specs + let got_return = match &got.return_spec { + ReturnSpec::Fixed(ty) => *ty, + _ => Ty::ANY, // Dynamic return specs can't be checked statically + }; + let expected_return = match &expected.return_spec { + ReturnSpec::Fixed(ty) => *ty, + _ => Ty::ANY, + }; + + unify_ty(store, got_return, expected_return, variance) + .map_err(|e| e.with_path(PathElement::ReturnType)) +} + +/// Create a `UnifyError` for type mismatch. +fn make_error(got: Ty, expected: Ty) -> UnifyError { + UnifyError::new(got, expected, UnifyReason::TypeMismatch) +} + +/// Check if `subtype` is a subtype of `supertype`. 
+pub fn is_subtype_ty(store: &S, subtype: Ty, supertype: Ty) -> bool { + // Fast path: identical types + if subtype == supertype { + return true; + } + + // Fast paths for well-known types + if subtype == Ty::NEVER { + return true; // Never <: everything + } + if supertype == Ty::ANY { + return true; // everything <: Any + } + + unify_ty(store, subtype, supertype, Variance::Covariant).is_ok() +} + +/// Check if two types are equivalent. +pub fn types_equivalent_ty(store: &S, a: Ty, b: Ty) -> bool { + if a == b { + return true; + } + is_subtype_ty(store, a, b) && is_subtype_ty(store, b, a) +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::store::{ + FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, + Ty, TyStore, + }; + + /// Helper to create a simple function type + fn func_ty(store: &mut TyStore, params: Vec, ret: Ty) -> Ty { + let params: Vec = params + .into_iter() + .enumerate() + .map(|(i, ty)| ParamInterned { + name: format!("p{i}"), + ty, + has_default: false, + }) + .collect(); + store.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(ret), + variadic: false, + }) + } + + /// Helper to create an object type + fn obj_ty(store: &mut TyStore, fields: Vec<(&str, Ty, bool)>, has_unknown: bool) -> Ty { + let fields = fields + .into_iter() + .map(|(name, ty, required)| { + ( + name.to_string(), + FieldDefInterned { + ty, + required, + visibility: FieldVis::Normal, + }, + ) + }) + .collect(); + store.object(ObjectData { + fields, + has_unknown, + }) + } + + #[test] + fn test_primitives_unify() { + let store = TyStore::new(); + unify_ty(&store, Ty::NUMBER, Ty::NUMBER, Variance::Covariant) + .expect("Number unifies with Number"); + unify_ty(&store, Ty::STRING, Ty::STRING, Variance::Covariant) + .expect("String unifies with String"); + unify_ty(&store, Ty::BOOL, Ty::BOOL, Variance::Covariant).expect("Bool unifies with Bool"); + unify_ty(&store, Ty::NULL, 
Ty::NULL, Variance::Covariant).expect("Null unifies with Null"); + } + + #[test] + fn test_any_accepts_all() { + let store = TyStore::new(); + unify_ty(&store, Ty::NUMBER, Ty::ANY, Variance::Covariant).expect("Number subtype of Any"); + unify_ty(&store, Ty::STRING, Ty::ANY, Variance::Covariant).expect("String subtype of Any"); + unify_ty(&store, Ty::BOOL, Ty::ANY, Variance::Covariant).expect("Bool subtype of Any"); + } + + #[test] + fn test_never_is_bottom() { + let store = TyStore::new(); + unify_ty(&store, Ty::NEVER, Ty::NUMBER, Variance::Covariant) + .expect("Never subtype of Number"); + unify_ty(&store, Ty::NEVER, Ty::STRING, Variance::Covariant) + .expect("Never subtype of String"); + unify_ty(&store, Ty::NEVER, Ty::ANY, Variance::Covariant).expect("Never subtype of Any"); + } + + #[test] + fn test_primitive_mismatch() { + let store = TyStore::new(); + let err1 = unify_ty(&store, Ty::NUMBER, Ty::STRING, Variance::Covariant) + .expect_err("Number != String"); + assert_matches!(err1.reason, UnifyReason::TypeMismatch); + let err2 = unify_ty(&store, Ty::BOOL, Ty::NUMBER, Variance::Covariant) + .expect_err("Bool != Number"); + assert_matches!(err2.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_char_subtype_of_string() { + let store = TyStore::new(); + unify_ty(&store, Ty::CHAR, Ty::STRING, Variance::Covariant) + .expect("Char subtype of String"); + let err = unify_ty(&store, Ty::STRING, Ty::CHAR, Variance::Covariant) + .expect_err("String not subtype of Char"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_bool_literals() { + let store = TyStore::new(); + unify_ty(&store, Ty::TRUE, Ty::BOOL, Variance::Covariant).expect("True subtype of Bool"); + unify_ty(&store, Ty::FALSE, Ty::BOOL, Variance::Covariant).expect("False subtype of Bool"); + let err = unify_ty(&store, Ty::BOOL, Ty::TRUE, Variance::Covariant) + .expect_err("Bool not subtype of True"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + 
#[test] + fn test_array_covariance() { + let mut store = TyStore::new(); + let arr_num = store.array(Ty::NUMBER); + let arr_any = store.array(Ty::ANY); + + unify_ty(&store, arr_num, arr_any, Variance::Covariant) + .expect("Array subtype of Array"); + let err = unify_ty(&store, arr_any, arr_num, Variance::Covariant) + .expect_err("Array not subtype of Array"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_tuple_to_array() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); + let arr_num = store.array(Ty::NUMBER); + + unify_ty(&store, tuple, arr_num, Variance::Covariant) + .expect("Tuple[Number, Number] subtype of Array"); + } + + #[test] + fn test_function_param_contravariance() { + let mut store = TyStore::new(); + let fn_any_to_num = func_ty(&mut store, vec![Ty::ANY], Ty::NUMBER); + let fn_num_to_num = func_ty(&mut store, vec![Ty::NUMBER], Ty::NUMBER); + + // fn(Any) -> Number can substitute for fn(Number) -> Number + unify_ty(&store, fn_any_to_num, fn_num_to_num, Variance::Covariant) + .expect("fn(Any)->Number subtype of fn(Number)->Number"); + + // fn(Number) -> Number cannot substitute for fn(Any) -> Number + let err = unify_ty(&store, fn_num_to_num, fn_any_to_num, Variance::Covariant) + .expect_err("fn(Number)->Number not subtype of fn(Any)->Number"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_function_return_covariance() { + let mut store = TyStore::new(); + let fn_to_num = func_ty(&mut store, vec![], Ty::NUMBER); + let fn_to_any = func_ty(&mut store, vec![], Ty::ANY); + + // fn() -> Number can substitute for fn() -> Any + unify_ty(&store, fn_to_num, fn_to_any, Variance::Covariant) + .expect("fn()->Number subtype of fn()->Any"); + + // fn() -> Any cannot substitute for fn() -> Number + let err = unify_ty(&store, fn_to_any, fn_to_num, Variance::Covariant) + .expect_err("fn()->Any not subtype of fn()->Number"); + assert_matches!(err.reason, 
UnifyReason::TypeMismatch); + } + + #[test] + fn test_object_structural_subtyping() { + let mut store = TyStore::new(); + let obj_ab = obj_ty( + &mut store, + vec![("a", Ty::NUMBER, true), ("b", Ty::STRING, true)], + false, + ); + let obj_a = obj_ty(&mut store, vec![("a", Ty::NUMBER, true)], true); + + // Object with more fields can be used where fewer are required (if open) + unify_ty(&store, obj_ab, obj_a, Variance::Covariant).expect("{a,b} subtype of {a,...}"); + } + + #[test] + fn test_object_missing_field() { + let mut store = TyStore::new(); + let obj_a = obj_ty(&mut store, vec![("a", Ty::NUMBER, true)], false); + let obj_ab = obj_ty( + &mut store, + vec![("a", Ty::NUMBER, true), ("b", Ty::STRING, true)], + false, + ); + + // Closed object with fewer fields cannot match one requiring more + let err = unify_ty(&store, obj_a, obj_ab, Variance::Covariant) + .expect_err("{a} not subtype of {a,b}"); + assert_matches!(err.reason, UnifyReason::MissingField { .. }); + } + + #[test] + fn test_bounded_number() { + let mut store = TyStore::new(); + let narrow = store.bounded_number(NumBounds::between(0.0, 10.0)); + let wide = store.bounded_number(NumBounds::between(-100.0, 100.0)); + + // Narrow bounds are subtype of wider bounds + unify_ty(&store, narrow, wide, Variance::Covariant).expect("[0,10] subtype of [-100,100]"); + let err1 = unify_ty(&store, wide, narrow, Variance::Covariant) + .expect_err("[-100,100] not subtype of [0,10]"); + assert_matches!(err1.reason, UnifyReason::TypeMismatch); + + // BoundedNumber is subtype of Number + unify_ty(&store, narrow, Ty::NUMBER, Variance::Covariant) + .expect("[0,10] subtype of Number"); + let err2 = unify_ty(&store, Ty::NUMBER, narrow, Variance::Covariant) + .expect_err("Number not subtype of [0,10]"); + assert_matches!(err2.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_union_subtyping() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + // Number is subtype 
of (Number | String) + unify_ty(&store, Ty::NUMBER, num_or_str, Variance::Covariant) + .expect("Number subtype of (Number|String)"); + unify_ty(&store, Ty::STRING, num_or_str, Variance::Covariant) + .expect("String subtype of (Number|String)"); + + // (Number | String) is NOT subtype of Number + let err = unify_ty(&store, num_or_str, Ty::NUMBER, Variance::Covariant) + .expect_err("(Number|String) not subtype of Number"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_is_subtype() { + let store = TyStore::new(); + assert!(is_subtype_ty(&store, Ty::NUMBER, Ty::ANY)); + assert!(is_subtype_ty(&store, Ty::NEVER, Ty::NUMBER)); + assert!(!is_subtype_ty(&store, Ty::NUMBER, Ty::STRING)); + } + + #[test] + fn test_types_equivalent() { + let store = TyStore::new(); + assert!(types_equivalent_ty(&store, Ty::NUMBER, Ty::NUMBER)); + assert!(!types_equivalent_ty(&store, Ty::NUMBER, Ty::ANY)); + } + + #[test] + fn test_union_mismatch_error() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + // Bool cannot unify with (Number | String), should get comprehensive error + let result = unify_ty(&store, Ty::BOOL, num_or_str, Variance::Covariant); + + // Should be a UnionMismatch with errors for each variant + assert_matches!(result, Err(UnifyError { reason: UnifyReason::UnionMismatch(ref errs), .. 
}) => { + // Should have two errors - one for each union variant + assert_eq!( + errs.iter().map(|e| &e.reason).collect::>(), + vec![ + &UnifyReason::TypeMismatch, + &UnifyReason::TypeMismatch, + ] + ); + }); + } + + #[test] + fn test_union_mismatch_error_format() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + let result = unify_ty(&store, Ty::BOOL, num_or_str, Variance::Covariant); + let err = result.expect_err("should fail"); + let formatted = err.format(&store); + + // Should show each variant type that was tried + assert!( + formatted.contains("tried `number`: type mismatch"), + "Should show number variant: {formatted}" + ); + assert!( + formatted.contains("tried `string`: type mismatch"), + "Should show string variant: {formatted}" + ); + } + + #[test] + fn test_union_mismatch_with_object_error() { + let mut store = TyStore::new(); + + // Create an object type with required field 'a' + let obj_with_a = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + + // Create union: Number | {a: number} + let union = store.union(vec![Ty::NUMBER, obj_with_a]); + + // Empty object should fail with specific error for each variant + let empty_obj = store.object(ObjectData { + fields: vec![], + has_unknown: false, + }); + + let result = unify_ty(&store, empty_obj, union, Variance::Covariant); + let err = result.expect_err("should fail"); + let formatted = err.format(&store); + + // Should show type mismatch for number and missing field for object + assert!( + formatted.contains("tried `number`"), + "Should show number variant: {formatted}" + ); + assert!( + formatted.contains("missing field 'a'"), + "Should show missing field error: {formatted}" + ); + } +} diff --git a/crates/jrsonnet-lsp-types/src/unification/types.rs b/crates/jrsonnet-lsp-types/src/unification/types.rs new file 
mode 100644 index 00000000..ecc5d268 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/unification/types.rs @@ -0,0 +1,161 @@ +use std::fmt::Write as _; + +use crate::store::{Ty, TypeStoreOps}; + +/// Variance determines how subtyping propagates through type constructors. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Variance { + /// Normal subtyping direction. More specific types can substitute for general ones. + Covariant, + /// Reversed subtyping direction. Used for function parameters. + Contravariant, +} + +impl Variance { + /// Flip variance (used when descending into contravariant positions). + #[must_use] + pub fn flip(self) -> Self { + match self { + Variance::Covariant => Variance::Contravariant, + Variance::Contravariant => Variance::Covariant, + } + } +} + +/// Path element describing where in the type structure an error occurred. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PathElement { + /// In a function parameter at the given index (0-based). + Parameter(usize), + /// In the return type of a function. + ReturnType, + /// In an object field with the given name. + Field(String), + /// In an array element type. + ArrayElement, + /// In a tuple element at the given index. + TupleElement(usize), + /// In a union variant. + UnionVariant, +} + +impl std::fmt::Display for PathElement { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + PathElement::Parameter(i) => write!(f, "parameter {}", i + 1), + PathElement::ReturnType => write!(f, "return type"), + PathElement::Field(name) => write!(f, "field '{name}'"), + PathElement::ArrayElement => write!(f, "array element"), + PathElement::TupleElement(i) => write!(f, "element {i}"), + PathElement::UnionVariant => write!(f, "union variant"), + } + } +} + +/// Reason why unification failed. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum UnifyReason { + /// Incompatible base types (e.g., Number vs String). 
+ TypeMismatch, + /// Object is missing a required field. + MissingField(String), + /// Closed object has an unexpected field. + ExtraField(String), + /// Function parameter count doesn't match. + ParamCountMismatch { got: usize, expected: usize }, + /// Nested unification error (for recursive structures). + Nested(Box), + /// Failed to unify with any variant of a union type. + /// Contains the errors from attempting each variant. + UnionMismatch(Vec), +} + +/// Detailed error explaining why unification failed. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct UnifyError { + /// The type that was provided (interned). + pub got: Ty, + /// The type that was expected (interned). + pub expected: Ty, + /// Why unification failed. + pub reason: UnifyReason, + /// Path to where the error occurred (innermost first). + pub path: Vec, +} + +impl UnifyError { + /// Create a new unification error. + #[must_use] + pub fn new(got: Ty, expected: Ty, reason: UnifyReason) -> Self { + Self { + got, + expected, + reason, + path: Vec::new(), + } + } + + /// Add a path element to track where in the type structure the error occurred. + #[must_use] + pub fn with_path(mut self, element: PathElement) -> Self { + self.path.push(element); + self + } + + /// Format the error for display. 
+ pub fn format(&self, store: &S) -> String { + let mut msg = format!( + "type mismatch: expected `{}`, got `{}`", + store.display(self.expected), + store.display(self.got) + ); + + if !self.path.is_empty() { + msg.push_str(" in "); + for (i, element) in self.path.iter().rev().enumerate() { + if i > 0 { + msg.push_str(" -> "); + } + msg.push_str(&element.to_string()); + } + } + + match &self.reason { + UnifyReason::TypeMismatch => {} + UnifyReason::MissingField(name) => { + let _ = write!(msg, " (missing required field '{name}')"); + } + UnifyReason::ExtraField(name) => { + let _ = write!(msg, " (unexpected field '{name}')"); + } + UnifyReason::ParamCountMismatch { got, expected } => { + let _ = write!(msg, " (expected {expected} parameters, got {got})"); + } + UnifyReason::Nested(inner) => { + let _ = write!(msg, " caused by: {}", inner.format(store)); + } + UnifyReason::UnionMismatch(variant_errors) => { + msg.push_str(" (no matching union variant)"); + for err in variant_errors { + // Show the variant type that was attempted + let reason = match &err.reason { + UnifyReason::TypeMismatch => "type mismatch".to_string(), + UnifyReason::MissingField(name) => format!("missing field '{name}'"), + UnifyReason::ExtraField(name) => format!("unexpected field '{name}'"), + UnifyReason::ParamCountMismatch { got, expected } => { + format!("expected {expected} params, got {got}") + } + UnifyReason::Nested(inner) => inner.format(store), + UnifyReason::UnionMismatch(_) => "nested union mismatch".to_string(), + }; + let _ = write!(msg, "\n tried `{}`: {reason}", store.display(err.expected)); + } + } + } + + msg + } +} + +/// Result of unification. 
+pub type UnifyResult = Result<(), UnifyError>; diff --git a/crates/jrsonnet-lsp/Cargo.toml b/crates/jrsonnet-lsp/Cargo.toml new file mode 100644 index 00000000..e0a1a536 --- /dev/null +++ b/crates/jrsonnet-lsp/Cargo.toml @@ -0,0 +1,54 @@ +[package] +name = "jrsonnet-lsp" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true + +[dependencies] +anyhow.workspace = true +crossbeam-channel = "0.5" +jrsonnet-lsp-check = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-check" } +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-lsp-handlers = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-handlers" } +jrsonnet-lsp-import = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-import" } +jrsonnet-lsp-inference = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-inference" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-evaluator.workspace = true +jrsonnet-parser.workspace = true +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +jrsonnet-stdlib.workspace = true +lsp-server.workspace = true +lsp-types.workspace = true +parking_lot.workspace = true +rayon = "1.11.0" +rustc-hash.workspace = true +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +thiserror.workspace = true +tracing = "0.1.44" + +[lints] +workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" +criterion = { version = "0.5", features = ["html_reports"] } +jrsonnet-lsp-scenario = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scenario" } +jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } +rstest = "0.23" +serde_json.workspace = true +tempfile.workspace = true + +[[bench]] +name = "type_inference" +harness = false + +[[bench]] +name = "type_cache" +harness = false + +[[bench]] +name = "unification" +harness = false diff --git 
a/crates/jrsonnet-lsp/benches/type_cache.rs b/crates/jrsonnet-lsp/benches/type_cache.rs new file mode 100644 index 00000000..7b5b84b2 --- /dev/null +++ b/crates/jrsonnet-lsp/benches/type_cache.rs @@ -0,0 +1,218 @@ +//! Benchmarks for type cache operations. +//! +//! Tests cache lookup, update, and invalidation performance. + +use std::sync::Arc; + +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; +use jrsonnet_lsp_document::{CanonicalPath, FileId, PathStore}; +use jrsonnet_lsp_inference::{new_shared_cache, TypeCache}; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; + +fn make_path(i: usize) -> CanonicalPath { + CanonicalPath::new(std::path::PathBuf::from(format!("/test/file{i}.jsonnet"))) +} + +fn make_paths(size: usize) -> Vec { + (0..size).map(make_path).collect() +} + +fn intern_files(cache: &TypeCache, paths: &[CanonicalPath]) -> Vec { + paths.iter().map(|path| cache.intern(path)).collect() +} + +fn bench_cache_operations(c: &mut Criterion) { + let mut group = c.benchmark_group("type_cache"); + + // Benchmark cache updates + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); + + group.bench_with_input(BenchmarkId::new("update", size), &size, |b, &size| { + b.iter(|| { + for file in files.iter().take(size) { + cache.update(*file, GlobalTy::NUMBER, 1); + } + }); + }); + } + + // Benchmark cache lookups (cache hit) + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); + + // Pre-populate cache + for file in &files { + cache.update(*file, GlobalTy::NUMBER, 1); + } + + group.bench_with_input(BenchmarkId::new("lookup_hit", size), &size, |b, &size| { + b.iter(|| { + for file in 
files.iter().take(size) { + black_box(cache.get(*file)); + } + }); + }); + } + + // Benchmark cache lookups (cache miss) + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + let cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); + + group.bench_with_input(BenchmarkId::new("lookup_miss", size), &size, |b, &size| { + b.iter(|| { + for file in files.iter().take(size) { + black_box(cache.get(*file)); + } + }); + }); + } + + // Benchmark invalidation + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + + group.bench_with_input(BenchmarkId::new("invalidate", size), &size, |b, &size| { + b.iter_batched( + || { + // Setup: create and populate cache + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); + for file in &files { + cache.update(*file, GlobalTy::NUMBER, 1); + } + (cache, files) + }, + |(mut cache, files)| { + // Benchmark: invalidate all entries + for file in files.iter().take(size) { + cache.invalidate(*file); + } + }, + criterion::BatchSize::SmallInput, + ); + }); + } + + // Benchmark bulk invalidation + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + + group.bench_with_input( + BenchmarkId::new("invalidate_many", size), + &size, + |b, &size| { + b.iter_batched( + || { + // Setup: create and populate cache + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); + for file in &files { + cache.update(*file, GlobalTy::NUMBER, 1); + } + (cache, files) + }, + |(mut cache, files)| { + // Benchmark: invalidate all at once + cache.invalidate_many(files); + }, + criterion::BatchSize::SmallInput, + ); + }, + ); + } + + group.finish(); +} + +fn bench_shared_cache(c: &mut Criterion) { + let mut group = 
c.benchmark_group("shared_type_cache"); + + // Benchmark concurrent access patterns (simulated via sequential access with locking) + for size in [100, 500] { + let global = Arc::new(GlobalTyStore::new()); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); + + // Pre-populate + let files = { + let mut write_cache = cache.write(); + let paths = make_paths(size); + let files = intern_files(&write_cache, &paths); + for file in &files { + write_cache.update(*file, GlobalTy::NUMBER, 1); + } + files + }; + + group.bench_with_input(BenchmarkId::new("read_lock", size), &size, |b, &size| { + b.iter(|| { + for file in files.iter().take(size) { + let read_cache = cache.read(); + black_box(read_cache.get(*file)); + } + }); + }); + + group.bench_with_input(BenchmarkId::new("write_lock", size), &size, |b, &size| { + b.iter(|| { + for file in files.iter().take(size) { + let mut write_cache = cache.write(); + write_cache.update(*file, GlobalTy::STRING, 2); + } + }); + }); + } + + group.finish(); +} + +fn bench_version_check(c: &mut Criterion) { + let mut group = c.benchmark_group("version_check"); + + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(1000); + let files = intern_files(&cache, &paths); + + // Pre-populate with version 1 + for file in &files { + cache.update(*file, GlobalTy::NUMBER, 1); + } + + group.bench_function("is_up_to_date_hit", |b| { + b.iter(|| { + for file in &files { + black_box(cache.is_up_to_date(*file, 1)); + } + }); + }); + + group.bench_function("is_up_to_date_miss", |b| { + b.iter(|| { + for file in &files { + black_box(cache.is_up_to_date(*file, 2)); // Different version + } + }); + }); + + group.finish(); +} + +criterion_group!( + benches, + bench_cache_operations, + bench_shared_cache, + bench_version_check +); +criterion_main!(benches); diff --git a/crates/jrsonnet-lsp/benches/type_inference.rs 
b/crates/jrsonnet-lsp/benches/type_inference.rs new file mode 100644 index 00000000..01509a4d --- /dev/null +++ b/crates/jrsonnet-lsp/benches/type_inference.rs @@ -0,0 +1,188 @@ +//! Benchmarks for type inference performance. +//! +//! Tests type inference on Jsonnet documents of varying sizes and complexity. + +use std::{fmt::Write as _, sync::Arc}; + +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; +use jrsonnet_lsp_document::{DocVersion, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_types::GlobalTyStore; + +/// Generate a simple Jsonnet document with N local bindings. +fn generate_locals_document(n: usize) -> String { + let mut code = String::new(); + for i in 0..n { + let _ = writeln!(&mut code, "local x{i} = {i};"); + } + code.push_str("{\n"); + for i in 0..n { + let _ = writeln!(&mut code, " field{i}: x{i},"); + } + code.push_str("}\n"); + code +} + +/// Generate a nested object document with depth D and width W. +fn generate_nested_object(depth: usize, width: usize) -> String { + fn generate_level(depth: usize, width: usize, indent: usize) -> String { + let spaces = " ".repeat(indent); + if depth == 0 { + return format!("{spaces}value: 42"); + } + let mut fields = Vec::new(); + for i in 0..width { + let inner = generate_level(depth - 1, width, indent + 2); + fields.push(format!("{spaces}field{i}: {{\n{inner}\n{spaces}}}")); + } + fields.join(",\n") + } + format!("{{\n{}\n}}", generate_level(depth, width, 2)) +} + +/// Generate a document with function definitions and calls. +fn generate_functions_document(n: usize) -> String { + let mut code = String::new(); + // Define functions + for i in 0..n { + let _ = writeln!(&mut code, "local fn{i}(x) = x + {i};"); + } + // Call functions + code.push_str("[\n"); + for i in 0..n { + let _ = writeln!(&mut code, " fn{i}({i}),"); + } + code.push_str("]\n"); + code +} + +/// Generate a document with array comprehensions. 
+fn generate_comprehension_document(size: usize) -> String { + format!( + r"local data = std.range(0, {size}); +[x * 2 for x in data if x % 2 == 0] + " + ) +} + +fn bench_type_inference(c: &mut Criterion) { + let mut group = c.benchmark_group("type_inference"); + + // Benchmark locals scaling + for size in [10, 50, 100, 200, 500] { + let code = generate_locals_document(size); + let doc = Document::new(code.clone(), DocVersion::new(1)); + let global = Arc::new(GlobalTyStore::new()); + + group.throughput(Throughput::Elements(size as u64)); + group.bench_with_input( + BenchmarkId::new("locals", size), + &(doc, global), + |b, (doc, global)| { + b.iter(|| { + let analysis = + TypeAnalysis::analyze_with_global(black_box(doc), Arc::clone(global)); + black_box(analysis.document_type()) + }); + }, + ); + } + + // Benchmark nested objects + for (depth, width) in [(2, 3), (3, 3), (4, 2), (5, 2)] { + let code = generate_nested_object(depth, width); + let doc = Document::new(code.clone(), DocVersion::new(1)); + let global = Arc::new(GlobalTyStore::new()); + let label = format!("d{depth}w{width}"); + + group.bench_with_input( + BenchmarkId::new("nested_object", &label), + &(doc, global), + |b, (doc, global)| { + b.iter(|| { + let analysis = + TypeAnalysis::analyze_with_global(black_box(doc), Arc::clone(global)); + black_box(analysis.document_type()) + }); + }, + ); + } + + // Benchmark functions + for size in [10, 25, 50, 100] { + let code = generate_functions_document(size); + let doc = Document::new(code.clone(), DocVersion::new(1)); + let global = Arc::new(GlobalTyStore::new()); + + group.throughput(Throughput::Elements(size as u64)); + group.bench_with_input( + BenchmarkId::new("functions", size), + &(doc, global), + |b, (doc, global)| { + b.iter(|| { + let analysis = + TypeAnalysis::analyze_with_global(black_box(doc), Arc::clone(global)); + black_box(analysis.document_type()) + }); + }, + ); + } + + // Benchmark comprehensions + for size in [10, 50, 100, 500] { + let code = 
generate_comprehension_document(size); + let doc = Document::new(code.clone(), DocVersion::new(1)); + let global = Arc::new(GlobalTyStore::new()); + + group.bench_with_input( + BenchmarkId::new("comprehension", size), + &(doc, global), + |b, (doc, global)| { + b.iter(|| { + let analysis = + TypeAnalysis::analyze_with_global(black_box(doc), Arc::clone(global)); + black_box(analysis.document_type()) + }); + }, + ); + } + + group.finish(); +} + +fn bench_repeated_analysis(c: &mut Criterion) { + let mut group = c.benchmark_group("repeated_analysis"); + + // Test that repeated analysis with same global store is efficient + let code = generate_locals_document(100); + let global = Arc::new(GlobalTyStore::new()); + + group.bench_function("same_global_store", |b| { + b.iter(|| { + // Simulate analyzing 10 documents with shared global store + for i in 0..10 { + let doc = Document::new(code.clone(), DocVersion::new(i)); + let analysis = + TypeAnalysis::analyze_with_global(black_box(&doc), Arc::clone(&global)); + black_box(analysis.document_type()); + } + }); + }); + + group.bench_function("fresh_global_store", |b| { + b.iter(|| { + // Simulate analyzing 10 documents with fresh global stores + for i in 0..10 { + let doc = Document::new(code.clone(), DocVersion::new(i)); + let fresh_global = Arc::new(GlobalTyStore::new()); + let analysis = TypeAnalysis::analyze_with_global(black_box(&doc), fresh_global); + black_box(analysis.document_type()); + } + }); + }); + + group.finish(); +} + +criterion_group!(benches, bench_type_inference, bench_repeated_analysis); +criterion_main!(benches); diff --git a/crates/jrsonnet-lsp/benches/unification.rs b/crates/jrsonnet-lsp/benches/unification.rs new file mode 100644 index 00000000..e13df58c --- /dev/null +++ b/crates/jrsonnet-lsp/benches/unification.rs @@ -0,0 +1,339 @@ +//! Benchmarks for type unification. +//! +//! Tests unification performance for various type combinations. 
+ +use std::sync::Arc; + +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; +use jrsonnet_lsp_types::{ + is_subtype_ty, FieldDefInterned, FieldVis, FunctionData, GlobalTyStore, MutStore, ObjectData, + ParamInterned, ReturnSpec, Ty, TyData, +}; + +fn bench_primitive_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_primitives"); + + let global = Arc::new(GlobalTyStore::new()); + + // Same type comparisons (fast path) + group.bench_function("same_type", |b| { + let store = MutStore::new(Arc::clone(&global)); + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NUMBER, Ty::NUMBER)); + black_box(is_subtype_ty(&store, Ty::STRING, Ty::STRING)); + black_box(is_subtype_ty(&store, Ty::BOOL, Ty::BOOL)); + } + }); + }); + + // ANY comparisons + group.bench_function("any_supertype", |b| { + let store = MutStore::new(Arc::clone(&global)); + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NUMBER, Ty::ANY)); + black_box(is_subtype_ty(&store, Ty::STRING, Ty::ANY)); + black_box(is_subtype_ty(&store, Ty::BOOL, Ty::ANY)); + } + }); + }); + + // NEVER comparisons + group.bench_function("never_subtype", |b| { + let store = MutStore::new(Arc::clone(&global)); + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NEVER, Ty::NUMBER)); + black_box(is_subtype_ty(&store, Ty::NEVER, Ty::STRING)); + black_box(is_subtype_ty(&store, Ty::NEVER, Ty::ANY)); + } + }); + }); + + // Incompatible types + group.bench_function("incompatible", |b| { + let store = MutStore::new(Arc::clone(&global)); + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NUMBER, Ty::STRING)); + black_box(is_subtype_ty(&store, Ty::STRING, Ty::BOOL)); + black_box(is_subtype_ty(&store, Ty::BOOL, Ty::NUMBER)); + } + }); + }); + + group.finish(); +} + +fn bench_array_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_arrays"); + + let global = 
Arc::new(GlobalTyStore::new()); + + // Simple array comparisons + group.bench_function("simple_arrays", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + let arr_num = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let arr_str = store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + let arr_any = store.intern(TyData::Array { + elem: Ty::ANY, + is_set: false, + }); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, arr_num, arr_num)); + black_box(is_subtype_ty(&store, arr_num, arr_any)); + black_box(is_subtype_ty(&store, arr_num, arr_str)); + } + }); + }); + + // Nested arrays + group.bench_function("nested_arrays", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + let arr_num = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let arr_arr_num = store.intern(TyData::Array { + elem: arr_num, + is_set: false, + }); + let arr_arr_arr_num = store.intern(TyData::Array { + elem: arr_arr_num, + is_set: false, + }); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, arr_arr_num, arr_arr_num)); + black_box(is_subtype_ty(&store, arr_arr_arr_num, arr_arr_arr_num)); + } + }); + }); + + group.finish(); +} + +fn bench_object_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_objects"); + + let global = Arc::new(GlobalTyStore::new()); + + // Small objects + for n_fields in [2, 5, 10, 20] { + group.bench_with_input( + BenchmarkId::new("small_object", n_fields), + &n_fields, + |b, &n_fields| { + let mut store = MutStore::new(Arc::clone(&global)); + + let fields: Vec<_> = (0..n_fields) + .map(|i| { + ( + format!("field{i}"), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + ) + }) + .collect(); + + let obj = store.intern(TyData::Object(ObjectData { + fields, + has_unknown: false, + })); + + b.iter(|| { + for _ in 0..100 { + black_box(is_subtype_ty(&store, obj, obj)); + } + }); 
+ }, + ); + } + + // Object with unknown fields + group.bench_function("open_object", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + + let closed_obj = store.intern(TyData::Object(ObjectData { + fields: vec![( + "x".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + })); + + let open_obj = store.intern(TyData::Object(ObjectData { + fields: vec![( + "x".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + })); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, closed_obj, open_obj)); + black_box(is_subtype_ty(&store, open_obj, closed_obj)); + } + }); + }); + + group.finish(); +} + +fn bench_function_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_functions"); + + let global = Arc::new(GlobalTyStore::new()); + + // Simple functions + group.bench_function("simple_function", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + + let fn1 = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".into(), + ty: Ty::NUMBER, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + })); + + let fn2 = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".into(), + ty: Ty::ANY, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + })); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, fn1, fn1)); + black_box(is_subtype_ty(&store, fn1, fn2)); + } + }); + }); + + // Functions with multiple params + for n_params in [1, 3, 5, 10] { + group.bench_with_input( + BenchmarkId::new("multi_param_function", n_params), + &n_params, + |b, &n_params| { + let mut store = MutStore::new(Arc::clone(&global)); + + let params: Vec<_> = (0..n_params) + .map(|i| ParamInterned { + name: 
format!("p{i}"), + ty: Ty::NUMBER, + has_default: false, + }) + .collect(); + + let func = store.intern(TyData::Function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + })); + + b.iter(|| { + for _ in 0..100 { + black_box(is_subtype_ty(&store, func, func)); + } + }); + }, + ); + } + + group.finish(); +} + +fn bench_union_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_unions"); + + let global = Arc::new(GlobalTyStore::new()); + + // Small unions + group.bench_function("small_union", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + + let union2 = store.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING])); + let union3 = store.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL])); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NUMBER, union2)); + black_box(is_subtype_ty(&store, union2, union3)); + } + }); + }); + + // Larger unions + for n_variants in [2, 4, 8] { + group.bench_with_input( + BenchmarkId::new("union_size", n_variants), + &n_variants, + |b, &n_variants| { + let mut store = MutStore::new(Arc::clone(&global)); + + // Create distinct types for the union + let variants: Vec<_> = (0..n_variants) + .map(|i| { + store.intern(TyData::Array { + elem: if i % 2 == 0 { Ty::NUMBER } else { Ty::STRING }, + is_set: false, + }) + }) + .collect(); + + let union = store.intern(TyData::Union(variants.clone())); + + b.iter(|| { + for _ in 0..100 { + // Check if each variant is subtype of union + for v in &variants { + black_box(is_subtype_ty(&store, *v, union)); + } + } + }); + }, + ); + } + + group.finish(); +} + +criterion_group!( + benches, + bench_primitive_unification, + bench_array_unification, + bench_object_unification, + bench_function_unification, + bench_union_unification, +); +criterion_main!(benches); diff --git a/crates/jrsonnet-lsp/src/analysis/eval.rs b/crates/jrsonnet-lsp/src/analysis/eval.rs new file mode 100644 index 
00000000..0fe85055 --- /dev/null +++ b/crates/jrsonnet-lsp/src/analysis/eval.rs @@ -0,0 +1,401 @@
+//! Evaluation diagnostics for runtime error detection.
+//!
+//! This module provides the ability to evaluate Jsonnet documents and convert
+//! runtime errors into LSP diagnostics.
+
+use std::{
+	collections::HashSet,
+	path::{Path, PathBuf},
+};
+
+use jrsonnet_evaluator::{
+	error::{Error as EvalError, ErrorKind as EvalErrorKind},
+	trace::PathResolver,
+	FileImportResolver, State,
+};
+use jrsonnet_lsp_document::{CanonicalPath, LineIndex};
+use jrsonnet_parser::{SourceFile, SourcePath};
+use jrsonnet_stdlib::ContextInitializer;
+use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString, Range};
+
+use super::tanka;
+use crate::config::ResolvePathsWithTankaMode;
+
+/// Configuration for evaluation diagnostics.
+#[derive(Debug, Clone, Default)]
+pub struct EvalConfig {
+	/// Import paths (JPATH) to search for imports.
+	pub jpath: Vec<PathBuf>,
+	/// Whether to resolve paths using Tanka conventions.
+	pub resolve_paths_with_tanka: ResolvePathsWithTankaMode,
+}
+
+/// Create a jrsonnet State with the given jpath entries.
+pub(crate) fn create_state_with_jpath(jpath: &[PathBuf]) -> State {
+	let resolver = PathResolver::new_cwd_fallback();
+	let context_initializer = ContextInitializer::new(resolver);
+
+	let mut import_resolver = FileImportResolver::default();
+	for path in jpath {
+		import_resolver.add_jpath(path.clone());
+	}
+
+	let mut builder = State::builder();
+	builder.context_initializer(context_initializer);
+	builder.import_resolver(import_resolver);
+	builder.build()
+}
+
+/// Build effective import roots for eval-style snippet execution.
+///
+/// Eval paths are executed via virtual snippets, so they do not naturally
+/// resolve relative imports from the document directory. To mirror file-based
+/// behavior, always include the file parent as a search root.
+#[must_use]
+pub(crate) fn eval_import_roots_for_file(
+	path: &Path,
+	configured_roots: &[PathBuf],
+	resolve_paths_with_tanka: ResolvePathsWithTankaMode,
+) -> Vec<PathBuf> {
+	let mut roots = Vec::new();
+	if let Some(parent) = path.parent() {
+		roots.push(parent.to_path_buf());
+	}
+	roots.extend(tanka::effective_import_roots(
+		path,
+		configured_roots,
+		resolve_paths_with_tanka,
+	));
+
+	let mut seen = HashSet::new();
+	roots
+		.into_iter()
+		.filter(|root| seen.insert(root.clone()))
+		.collect()
+}
+
+/// Evaluator for Jsonnet documents.
+pub struct Evaluator {
+	/// Base jpath configuration (from settings).
+	base_jpath: Vec<PathBuf>,
+	/// Tanka path resolution mode.
+	tanka_mode: ResolvePathsWithTankaMode,
+}
+
+#[derive(Debug, Clone)]
+pub struct EvalDiagnostic {
+	pub error_kind: EvalErrorKind,
+	pub diagnostic: Diagnostic,
+}
+
+impl EvalDiagnostic {
+	#[must_use]
+	pub const fn is_type_like(&self) -> bool {
+		matches!(
+			self.error_kind,
+			EvalErrorKind::UnaryOperatorDoesNotOperateOnType(_, _)
+				| EvalErrorKind::BinaryOperatorDoesNotOperateOnValues(_, _, _)
+				| EvalErrorKind::TypeMismatch(_, _, _)
+				| EvalErrorKind::NoSuchField(_, _)
+				| EvalErrorKind::OnlyFunctionsCanBeCalledGot(_)
+				| EvalErrorKind::FieldMustBeStringGot(_)
+				| EvalErrorKind::AttemptedIndexAnArrayWithString(_)
+				| EvalErrorKind::ValueIndexMustBeTypeGot(_, _, _)
+				| EvalErrorKind::CantIndexInto(_)
+				| EvalErrorKind::ValueIsNotIndexable(_)
+				| EvalErrorKind::TypeError(_)
+		)
+	}
+}
+
+impl Evaluator {
+	/// Create a new evaluator with the given configuration.
+	#[must_use]
+	pub fn new(config: &EvalConfig) -> Self {
+		Self {
+			base_jpath: config.jpath.clone(),
+			tanka_mode: config.resolve_paths_with_tanka,
+		}
+	}
+
+	/// Get the jpath for evaluating a specific file.
+	///
+	/// If Tanka mode is enabled, this will resolve paths based on the
+	/// file's location in the Tanka project structure.
+	fn get_jpath_for_file(&self, path: &Path) -> Vec<PathBuf> {
+		eval_import_roots_for_file(path, &self.base_jpath, self.tanka_mode)
+	}
+
+	/// Evaluate a document and return any diagnostics.
+	///
+	/// Returns `None` if evaluation succeeds, or a diagnostic if it fails.
+	#[must_use]
+	pub fn evaluate(
+		&self,
+		path: &CanonicalPath,
+		text: &str,
+		line_index: &LineIndex,
+	) -> Option<EvalDiagnostic> {
+		let jpath = self.get_jpath_for_file(path.as_path());
+		let state = create_state_with_jpath(&jpath);
+
+		let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf()));
+
+		// Evaluate the snippet
+		match state.evaluate_snippet(source_path.to_string(), text) {
+			Ok(_) => None,
+			Err(err) => Some(eval_error_to_diagnostic(&err, path, text, line_index)),
+		}
+	}
+}
+
+/// Convert an evaluation error to an LSP diagnostic.
+fn eval_error_to_lsp_diagnostic(
+	err: &EvalError,
+	file_path: &CanonicalPath,
+	text: &str,
+	line_index: &LineIndex,
+) -> Diagnostic {
+	let range = find_trace_range_in_file(err, file_path, text, line_index).unwrap_or_default();
+
+	// Format the error message
+	let error_kind = err.error();
+	let message = format!("{error_kind}");
+
+	Diagnostic {
+		range,
+		severity: Some(DiagnosticSeverity::ERROR),
+		code: Some(NumberOrString::String("eval-error".to_string())),
+		code_description: None,
+		source: Some("jrsonnet-eval".to_string()),
+		message,
+		related_information: None,
+		tags: None,
+		data: None,
+	}
+}
+
+/// Convert an evaluation error to a typed diagnostic.
+fn eval_error_to_diagnostic(
+	err: &EvalError,
+	file_path: &CanonicalPath,
+	text: &str,
+	line_index: &LineIndex,
+) -> EvalDiagnostic {
+	let diagnostic = eval_error_to_lsp_diagnostic(err, file_path, text, line_index);
+	EvalDiagnostic {
+		error_kind: err.error().clone(),
+		diagnostic,
+	}
+}
+
+fn find_trace_range_in_file(
+	err: &EvalError,
+	file_path: &CanonicalPath,
+	text: &str,
+	line_index: &LineIndex,
+) -> Option<Range> {
+	let file_display = file_path.as_path().display().to_string();
+	for element in &err.trace().0 {
+		let Some(span) = &element.location else {
+			continue;
+		};
+
+		let span_path = span.0.source_path();
+		let in_current_file = span_path
+			.path()
+			.is_some_and(|path| path == file_path.as_path())
+			|| span_path.to_string() == file_display;
+		if !in_current_file {
+			continue;
+		}
+
+		if let Some(start) = line_index.position(span.1.into(), text) {
+			let end = line_index.position(span.2.into(), text).unwrap_or(start);
+			return Some(Range {
+				start: start.into(),
+				end: end.into(),
+			});
+		}
+	}
+
+	None
+}
+
+#[cfg(test)]
+mod tests {
+	use std::fs;
+
+	use assert_matches::assert_matches;
+	use jrsonnet_lsp_document::{DocVersion, Document};
+	use tempfile::TempDir;
+
+	use super::*;
+
+	fn test_path() -> CanonicalPath {
+		CanonicalPath::new("/test.jsonnet".into())
+	}
+
+	/// Assert that a diagnostic has the expected eval-error structure.
+ fn assert_eval_diagnostic(diag: &EvalDiagnostic) { + let lsp_diag = &diag.diagnostic; + assert_eq!( + lsp_diag.severity, + Some(DiagnosticSeverity::ERROR), + "expected ERROR severity" + ); + assert_eq!( + lsp_diag.code, + Some(NumberOrString::String("eval-error".to_string())), + "expected eval-error code" + ); + assert_eq!( + lsp_diag.source, + Some("jrsonnet-eval".to_string()), + "expected jrsonnet-eval source" + ); + assert!( + !lsp_diag.message.is_empty(), + "eval diagnostic message should not be empty" + ); + } + + #[test] + fn test_eval_success() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new(r#"{ hello: "world" }"#.to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + assert_matches!(result, None); + } + + #[test] + fn test_eval_undefined_variable() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new("undefined_var".to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with undefined variable"); + assert_matches!( + diag.error_kind, + EvalErrorKind::VariableIsNotDefined(ref name, ref suggestions) + if name == "undefined_var" && suggestions.is_empty() + ); + assert_eval_diagnostic(&diag); + } + + #[test] + fn test_eval_type_error() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + // Array index must be a number, not a string + let doc = Document::new(r#"[1, 2, 3]["hello"]"#.to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with type error"); + assert!(diag.is_type_like(), "expected type-like eval error variant"); + assert_matches!( + diag.error_kind, + EvalErrorKind::TypeError(_) + | 
EvalErrorKind::TypeMismatch(_, _, _) + | EvalErrorKind::AttemptedIndexAnArrayWithString(_) + | EvalErrorKind::ValueIndexMustBeTypeGot(_, _, _) + | EvalErrorKind::CantIndexInto(_) + ); + assert_eval_diagnostic(&diag); + } + + #[test] + fn test_eval_uses_non_default_range_for_virtual_main_file() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new("std.length(1)".to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with type error"); + assert!(diag.is_type_like(), "expected type-like eval error variant"); + assert_ne!( + diag.diagnostic.range, + lsp_types::Range::default(), + "eval diagnostics should point at a concrete span instead of 0:0" + ); + } + + #[test] + fn test_eval_assert_failure() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new(r"assert false; true".to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with assert"); + assert_matches!(diag.error_kind, EvalErrorKind::AssertionFailed(_)); + assert_eval_diagnostic(&diag); + } + + #[test] + fn test_eval_field_access_error() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new(r"{}.nonexistent".to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with field access error"); + assert_matches!( + diag.error_kind, + EvalErrorKind::NoSuchField(ref field, ref suggestions) + if field == "nonexistent" && suggestions.is_empty() + ); + assert_eval_diagnostic(&diag); + } + + #[test] + fn test_eval_runtime_error() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc 
= Document::new( + r#"error "custom error message""#.to_string(), + DocVersion::new(1), + ); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with runtime error"); + assert_matches!( + diag.error_kind, + EvalErrorKind::RuntimeError(ref message) if message == "custom error message" + ); + assert_eval_diagnostic(&diag); + } + + #[test] + fn test_eval_import_roots_include_parent_in_auto_without_tanka_root() { + let tmp = TempDir::new().expect("tmp should be created"); + let env_dir = tmp.path().join("env"); + fs::create_dir_all(&env_dir).expect("env should be created"); + let file_path = env_dir.join("main.jsonnet"); + fs::write(&file_path, "{}").expect("file should be created"); + + let roots = eval_import_roots_for_file(&file_path, &[], ResolvePathsWithTankaMode::Auto); + assert_eq!(roots, vec![env_dir]); + } + + #[test] + fn test_eval_import_roots_parent_precedes_configured_roots() { + let tmp = TempDir::new().expect("tmp should be created"); + let env_dir = tmp.path().join("env"); + let configured_dir = tmp.path().join("configured"); + fs::create_dir_all(&env_dir).expect("env should be created"); + fs::create_dir_all(&configured_dir).expect("configured should be created"); + let file_path = env_dir.join("main.jsonnet"); + fs::write(&file_path, "{}").expect("file should be created"); + + let roots = eval_import_roots_for_file( + &file_path, + std::slice::from_ref(&configured_dir), + ResolvePathsWithTankaMode::False, + ); + assert_eq!(roots, vec![env_dir, configured_dir]); + } +} diff --git a/crates/jrsonnet-lsp/src/analysis/mod.rs b/crates/jrsonnet-lsp/src/analysis/mod.rs new file mode 100644 index 00000000..062aa4a3 --- /dev/null +++ b/crates/jrsonnet-lsp/src/analysis/mod.rs @@ -0,0 +1,9 @@ +//! Analysis module for evaluation-specific functionality. +//! +//! This module contains evaluation-related modules that depend on jrsonnet-evaluator. +//! 
For static analysis, see the jrsonnet-lsp-check crate.
+
+pub mod eval;
+pub mod tanka;
+
+pub use eval::{EvalConfig, EvalDiagnostic, Evaluator};
diff --git a/crates/jrsonnet-lsp/src/analysis/tanka.rs b/crates/jrsonnet-lsp/src/analysis/tanka.rs
new file mode 100644
index 00000000..59198cf9
--- /dev/null
+++ b/crates/jrsonnet-lsp/src/analysis/tanka.rs
@@ -0,0 +1,270 @@
+//! Tanka integration for resolving import paths.
+//!
+//! Depending on `resolve_paths_with_tanka` mode, the LSP can look for
+//! `jsonnetfile.json` or `tkrc.yaml` in parent directories to find the
+//! project root and add appropriate vendor/lib paths to the jpath.
+
+use std::{
+	collections::HashSet,
+	path::{Path, PathBuf},
+};
+
+use crate::config::ResolvePathsWithTankaMode;
+
+/// Find the Tanka project root by walking up from the given path.
+///
+/// The root is the directory that contains either `tkrc.yaml` or `jsonnetfile.json`.
+/// Returns `None` if no root is found.
+#[must_use]
+pub fn find_root(path: &Path) -> Option<PathBuf> {
+	let start = if path.is_file() { path.parent()? } else { path };
+
+	// Try tkrc.yaml first, then jsonnetfile.json
+	find_parent_file(start, "tkrc.yaml").or_else(|| find_parent_file(start, "jsonnetfile.json"))
+}
+
+/// Walk up the directory tree looking for a file with the given name.
+/// Returns the directory containing the file, or None if not found.
+fn find_parent_file(start: &Path, filename: &str) -> Option<PathBuf> {
+	let mut current = start.to_path_buf();
+
+	loop {
+		let candidate = current.join(filename);
+		if candidate.exists() {
+			return Some(current);
+		}
+
+		match current.parent() {
+			Some(parent) if parent != current => {
+				current = parent.to_path_buf();
+			}
+			_ => return None,
+		}
+	}
+}
+
+fn file_base(path: &Path) -> Option<PathBuf> {
+	if path.is_file() {
+		path.parent().map(Path::to_path_buf)
+	} else {
+		Some(path.to_path_buf())
+	}
+}
+
+fn build_jpath(root: &Path, base: &Path) -> Vec<PathBuf> {
+	let mut jpath = Vec::new();
+
+	// Add root/vendor
+	let root_vendor = root.join("vendor");
+	if root_vendor.is_dir() {
+		jpath.push(root_vendor);
+	}
+
+	// Add base/vendor if different from root
+	if base != root {
+		let base_vendor = base.join("vendor");
+		if base_vendor.is_dir() {
+			jpath.push(base_vendor);
+		}
+	}
+
+	// Add root/lib
+	let root_lib = root.join("lib");
+	if root_lib.is_dir() {
+		jpath.push(root_lib);
+	}
+
+	// Add base directory
+	jpath.push(base.to_path_buf());
+	jpath
+}
+
+/// Resolve jpath entries for a file using a configured Tanka mode.
+///
+/// Returns a list of paths to add to jpath for import resolution:
+/// - `<root>/vendor`
+/// - `<base>/vendor` (if different from root)
+/// - `<root>/lib`
+/// - `<base>` (the directory containing the file)
+///
+/// Mode behavior:
+/// - `false`: no extra roots
+/// - `auto`: only when a Tanka root marker is found
+/// - `true`: same as `auto`, but falls back to using `<base>` as root
+pub fn resolve_jpath(path: &Path, mode: ResolvePathsWithTankaMode) -> Vec<PathBuf> {
+	let Some(base) = file_base(path) else {
+		return Vec::new();
+	};
+
+	match mode {
+		ResolvePathsWithTankaMode::False => Vec::new(),
+		ResolvePathsWithTankaMode::Auto => {
+			let Some(root) = find_root(path) else {
+				return Vec::new();
+			};
+			build_jpath(&root, &base)
+		}
+		ResolvePathsWithTankaMode::True => {
+			let root = find_root(path).unwrap_or_else(|| base.clone());
+			build_jpath(&root, &base)
+		}
+	}
+}
+
+/// Compute effective import roots for LSP import resolution.
+///
+/// This always includes configured roots. When Tanka mode is enabled, additional
+/// Tanka-derived roots are appended and the result is de-duplicated while
+/// preserving order.
+#[must_use]
+pub fn effective_import_roots(
+	path: &Path,
+	configured_roots: &[PathBuf],
+	resolve_paths_with_tanka: ResolvePathsWithTankaMode,
+) -> Vec<PathBuf> {
+	let mut roots: Vec<PathBuf> = configured_roots.to_vec();
+	roots.extend(resolve_jpath(path, resolve_paths_with_tanka));
+
+	let mut seen = HashSet::new();
+	roots
+		.into_iter()
+		.filter(|root| seen.insert(root.clone()))
+		.collect()
+}
+
+#[cfg(test)]
+mod tests {
+	use std::fs;
+
+	use tempfile::TempDir;
+
+	use super::*;
+
+	#[test]
+	fn test_find_root_with_jsonnetfile() {
+		let tmp = TempDir::new().expect("expected success");
+		let root = tmp.path();
+
+		// Create jsonnetfile.json at root
+		fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success");
+
+		// Create a nested directory
+		let nested = root.join("environments").join("prod");
+		fs::create_dir_all(&nested).expect("expected success");
+
+		// Find root from nested directory
+		let found = find_root(&nested);
+		assert_eq!(found, Some(root.to_path_buf()));
+	}
+
+	#[test]
+	fn test_find_root_with_tkrc() {
+		let tmp = TempDir::new().expect("expected success");
+		let root = tmp.path();
+
+		// Create tkrc.yaml at root (takes precedence over jsonnetfile.json)
+		fs::write(root.join("tkrc.yaml"), "").expect("expected success");
+		fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success");
+
+		let found = find_root(root);
+		assert_eq!(found, Some(root.to_path_buf()));
+	}
+
+	#[test]
+	fn test_find_root_not_found() {
+		let tmp = TempDir::new().expect("expected success");
+		let found = find_root(tmp.path());
+		assert_eq!(found, None);
+	}
+
+	#[test]
+	fn test_resolve_jpath() {
+		let tmp = TempDir::new().expect("expected success");
+		let root = tmp.path();
+
+		// Create Tanka structure
+		fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success");
+		fs::create_dir(root.join("vendor")).expect("expected success");
+		fs::create_dir(root.join("lib")).expect("expected success");
+
+		let env =
root.join("environments").join("prod"); + fs::create_dir_all(&env).expect("expected success"); + + // Resolve jpath from environment directory + let jpath = resolve_jpath(&env, ResolvePathsWithTankaMode::Auto); + let expected = vec![root.join("vendor"), root.join("lib"), env]; + assert_eq!(jpath, expected); + } + + #[test] + fn test_resolve_jpath_with_env_vendor() { + let tmp = TempDir::new().expect("expected success"); + let root = tmp.path(); + + // Create Tanka structure with env-level vendor + fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success"); + fs::create_dir(root.join("vendor")).expect("expected success"); + + let env = root.join("environments").join("prod"); + fs::create_dir_all(&env).expect("expected success"); + fs::create_dir(env.join("vendor")).expect("expected success"); + + // Resolve jpath from environment directory + let jpath = resolve_jpath(&env, ResolvePathsWithTankaMode::Auto); + let expected = vec![root.join("vendor"), env.join("vendor"), env]; + assert_eq!(jpath, expected); + } + + #[test] + fn test_resolve_jpath_no_root() { + let tmp = TempDir::new().expect("expected success"); + let jpath = resolve_jpath(tmp.path(), ResolvePathsWithTankaMode::Auto); + assert_eq!(jpath, Vec::::new()); + } + + #[test] + fn test_resolve_jpath_force_mode_without_root() { + let tmp = TempDir::new().expect("expected success"); + let base = tmp.path().join("env"); + fs::create_dir_all(&base).expect("expected success"); + fs::create_dir(base.join("vendor")).expect("expected success"); + fs::create_dir(base.join("lib")).expect("expected success"); + + let jpath = resolve_jpath(&base, ResolvePathsWithTankaMode::True); + let expected = vec![base.join("vendor"), base.join("lib"), base]; + assert_eq!(jpath, expected); + } + + #[test] + fn test_effective_import_roots_without_tanka() { + let tmp = TempDir::new().expect("expected success"); + let base = tmp.path().join("env").join("main.jsonnet"); + fs::create_dir_all(base.parent().expect("base should 
have parent")) + .expect("expected success"); + fs::write(&base, "{}").expect("expected success"); + + let configured = vec![ + PathBuf::from("/configured/one"), + PathBuf::from("/configured/two"), + ]; + let roots = effective_import_roots(&base, &configured, ResolvePathsWithTankaMode::False); + assert_eq!(roots, configured); + } + + #[test] + fn test_effective_import_roots_with_tanka_dedups() { + let tmp = TempDir::new().expect("expected success"); + let root = tmp.path(); + fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success"); + fs::create_dir(root.join("vendor")).expect("expected success"); + let env = root.join("environments").join("prod"); + fs::create_dir_all(&env).expect("expected success"); + let file = env.join("main.jsonnet"); + fs::write(&file, "{}").expect("expected success"); + + let configured = vec![root.join("vendor")]; + let roots = effective_import_roots(&file, &configured, ResolvePathsWithTankaMode::Auto); + let expected = vec![root.join("vendor"), env]; + assert_eq!(roots, expected); + } +} diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs new file mode 100644 index 00000000..f985c288 --- /dev/null +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -0,0 +1,517 @@ +//! Async diagnostics with debouncing. +//! +//! Runs diagnostics computation in a background thread to avoid blocking the LSP event loop. +//! Debounces rapid edits to avoid computing diagnostics for intermediate states. 
+ +use std::{ + path::PathBuf, + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, + }, + thread, + time::Duration, +}; + +use crossbeam_channel::{Receiver, Sender}; +use jrsonnet_lsp_document::{DocVersion, Document, FileId}; +use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; +use jrsonnet_lsp_inference::{ + DocumentSource, SharedDocumentManager, SharedTypeCache, TypeProvider, +}; +use jrsonnet_lsp_types::GlobalTyStore; +use parking_lot::RwLock; +use rustc_hash::FxHashMap; +use tracing::{debug, trace}; + +use crate::{analysis::Evaluator, handlers}; + +/// Debounce delay before computing diagnostics after a change. +const DEBOUNCE_DELAY_MS: u64 = 500; + +/// Request to compute diagnostics for a document. +/// +/// We pass `text` and `version` to compute diagnostics against the exact +/// document state that triggered this request. +#[derive(Debug, Clone)] +struct DiagnosticsRequest { + /// Interned file identifier for sequence tracking. + file_id: FileId, + /// The document text. + text: String, + /// The document version. + version: DocVersion, + /// Whether to enable lint diagnostics. + enable_lint: bool, + /// Import roots to use for resolving import paths. + import_roots: Vec, + /// Sequence number to detect stale requests. + sequence: u64, +} + +/// Completed diagnostics result. +#[derive(Debug)] +pub struct DiagnosticsResult { + /// The computed diagnostics params. + pub params: lsp_types::PublishDiagnosticsParams, +} + +/// Configuration for the async diagnostics runner. +#[derive(Clone)] +pub struct DiagnosticsConfig { + /// Optional evaluator for runtime diagnostics. + pub evaluator: Option>, + /// Document source used for dependency-aware type analysis. + pub documents: SharedDocumentManager, + /// Import graph used to analyze dependencies before target files. + pub import_graph: Arc>, + /// Shared type cache used by the type provider. + pub type_cache: SharedTypeCache, + /// Shared global type store. 
+ pub global_types: Arc, +} + +struct WorkerDocumentSource { + current_file: FileId, + current_doc: Document, + documents: SharedDocumentManager, +} + +struct WorkerRuntime { + requests: Receiver, + results: Sender, + latest_sequences: Arc>>, + config: DiagnosticsConfig, +} + +impl DocumentSource for WorkerDocumentSource { + fn get_document_file(&self, file: FileId) -> Option { + if file == self.current_file { + return Some(self.current_doc.clone()); + } + self.documents.get_document_file(file) + } +} + +/// Async diagnostics runner. +/// +/// Computes diagnostics in a background thread with debouncing. +pub struct AsyncDiagnostics { + /// Channel to send requests to the background thread. + request_sender: Sender, + /// Channel to receive completed diagnostics. + result_receiver: Receiver, + /// Sequence counter for detecting stale requests. + sequence: AtomicU64, + /// Latest requested sequence per file (for debouncing). + latest_sequences: Arc>>, + /// Background thread handle. + _thread_handle: thread::JoinHandle<()>, +} + +impl AsyncDiagnostics { + /// Create a new async diagnostics runner with the given configuration. + #[must_use] + pub fn new(config: DiagnosticsConfig) -> Self { + let (request_sender, request_receiver) = crossbeam_channel::unbounded(); + let (result_sender, result_receiver) = crossbeam_channel::unbounded(); + let latest_sequences = Arc::new(RwLock::new(FxHashMap::default())); + let sequences_clone = Arc::clone(&latest_sequences); + + let thread_handle = thread::spawn(move || { + Self::worker_loop(WorkerRuntime { + requests: request_receiver, + results: result_sender, + latest_sequences: sequences_clone, + config, + }); + }); + + Self { + request_sender, + result_receiver, + sequence: AtomicU64::new(0), + latest_sequences, + _thread_handle: thread_handle, + } + } + + /// Schedule diagnostics computation for a document. 
+ /// + /// The request is debounced - if multiple requests come in for the same file + /// within the debounce window, only the latest is processed. + pub fn schedule( + &self, + file_id: FileId, + text: String, + version: DocVersion, + enable_lint: bool, + import_roots: Vec, + ) { + let sequence = self.sequence.fetch_add(1, Ordering::SeqCst); + + // Record this as the latest sequence for this path + self.latest_sequences.write().insert(file_id, sequence); + + let request = DiagnosticsRequest { + file_id, + text, + version, + enable_lint, + import_roots, + sequence, + }; + + // Send to background thread (non-blocking) + if let Err(e) = self.request_sender.send(request) { + debug!("Failed to send diagnostics request: {}", e); + } + } + + /// Get the result receiver for polling completed diagnostics. + pub fn results(&self) -> &Receiver { + &self.result_receiver + } + + /// Background worker loop. + fn worker_loop(runtime: WorkerRuntime) { + let WorkerRuntime { + requests, + results, + latest_sequences, + config, + } = runtime; + + loop { + // Wait for a request + let Ok(first_request) = requests.recv() else { + // Channel closed, exit + debug!("Diagnostics worker: channel closed, exiting"); + break; + }; + + // Debounce: wait a bit before processing + thread::sleep(Duration::from_millis(DEBOUNCE_DELAY_MS)); + + for request in Self::collect_latest_requests(first_request, &requests) { + trace!( + "Diagnostics worker: received request (seq={})", + request.sequence + ); + + // Check if this request is still the latest for this file + { + let sequences = latest_sequences.read(); + if let Some(&latest) = sequences.get(&request.file_id) { + if latest > request.sequence { + trace!( + "Diagnostics worker: skipping stale request (seq={}, latest={})", + request.sequence, + latest + ); + continue; + } + } + } + + // Compute diagnostics + // Reconstruct the document in the worker thread + let document = Document::new(request.text, request.version); + let Some(path) = 
config.documents.path(request.file_id) else { + debug!( + "Diagnostics worker: path missing for interned file, skipping diagnostics" + ); + continue; + }; + trace!( + "Diagnostics worker: computing diagnostics for {}", + path.as_ref() + ); + let doc_source = WorkerDocumentSource { + current_file: request.file_id, + current_doc: document.clone(), + documents: Arc::clone(&config.documents), + }; + let provider = TypeProvider::new( + Arc::clone(&config.type_cache), + Arc::clone(&config.import_graph), + Arc::clone(&config.global_types), + ); + let analysis = Arc::new(provider.analyze(path.as_ref(), &document, &doc_source)); + let import_resolution = ImportResolution::new(path.as_ref(), &request.import_roots); + let import_occurrences = import_resolution.parse_occurrences(&document); + + let Some(params) = handlers::publish_diagnostics_params( + path.as_ref(), + &document, + request.enable_lint, + config.evaluator.as_deref(), + analysis.as_ref(), + &import_occurrences, + ) else { + debug!( + "Diagnostics worker: failed to build URI for {}, skipping diagnostics", + path.as_ref() + ); + continue; + }; + + // Check again if still the latest (diagnostics computation may have taken time) + { + let sequences = latest_sequences.read(); + if let Some(&latest) = sequences.get(&request.file_id) { + if latest > request.sequence { + trace!( + "Diagnostics worker: discarding stale result (seq={}, latest={})", + request.sequence, + latest + ); + continue; + } + } + } + + // Reuse analysis work done for diagnostics in foreground requests. 
+ config.documents.cache_analysis_file( + request.file_id, + request.version, + Arc::clone(&analysis), + ); + + // Send result + let result = DiagnosticsResult { params }; + + if results.send(result).is_err() { + debug!("Diagnostics worker: result channel closed, exiting"); + return; + } + } + } + } + + fn collect_latest_requests( + first_request: DiagnosticsRequest, + requests: &Receiver, + ) -> Vec { + let mut latest_by_file = FxHashMap::default(); + latest_by_file.insert(first_request.file_id, first_request); + + for request in requests.try_iter() { + latest_by_file.insert(request.file_id, request); + } + + let mut coalesced: Vec<_> = latest_by_file.into_values().collect(); + coalesced.sort_unstable_by_key(|request| request.file_id); + coalesced + } +} + +impl Drop for AsyncDiagnostics { + fn drop(&mut self) { + // Dropping the sender will cause the worker to exit + // when it next tries to receive. + // We don't need to explicitly signal shutdown. + } +} + +#[cfg(test)] +mod tests { + use std::time::Duration; + + use jrsonnet_lsp_document::{CanonicalPath, PathStore}; + use rustc_hash::FxHashMap; + + use super::*; + + fn test_config() -> DiagnosticsConfig { + let global_types = Arc::new(GlobalTyStore::new()); + let path_store = PathStore::new(); + DiagnosticsConfig { + evaluator: None, + documents: Arc::new(jrsonnet_lsp_inference::DocumentManager::new( + Arc::clone(&global_types), + path_store.clone(), + )), + import_graph: Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))), + type_cache: jrsonnet_lsp_inference::new_shared_cache( + Arc::clone(&global_types), + path_store, + ), + global_types, + } + } + + fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(format!("/test/{name}.jsonnet").into()) + } + + #[test] + fn test_basic_diagnostics() { + let config = test_config(); + let documents = Arc::clone(&config.documents); + let runner = AsyncDiagnostics::new(config); + + let path = test_path("test"); + let file = documents.intern(&path); + let 
text = "{ a: 1 }".to_string(); + let version = DocVersion::new(1); + + runner.schedule(file, text, version, false, vec![]); + + // Wait for result with timeout + let result = runner + .results() + .recv_timeout(Duration::from_secs(2)) + .expect("should receive result"); + + assert_eq!(result.params.uri, path.to_uri().expect("valid URI")); + assert!(result.params.diagnostics.is_empty()); + } + + #[test] + fn test_debouncing() { + let config = test_config(); + let documents = Arc::clone(&config.documents); + let runner = AsyncDiagnostics::new(config); + + let path = test_path("test"); + let file = documents.intern(&path); + + // Schedule multiple requests rapidly + for i in 0..5 { + let text = format!("{{ a: {i} }}"); + let version = DocVersion::new(i); + runner.schedule(file, text, version, false, vec![]); + } + + // Should only get one result (the last one) due to debouncing + // Timeout needs to be > debounce delay (500ms) + computation time + let result = runner + .results() + .recv_timeout(Duration::from_secs(3)) + .expect("should receive result"); + + assert_eq!(result.params.uri, path.to_uri().expect("valid URI")); + + // Should not get more results immediately (other requests were debounced) + runner + .results() + .recv_timeout(Duration::from_millis(200)) + .expect_err("should not receive more results after debouncing"); + } + + #[test] + fn test_debouncing_with_equivalent_paths() { + let config = test_config(); + let documents = Arc::clone(&config.documents); + let runner = AsyncDiagnostics::new(config); + let file = documents.intern(&test_path("test")); + + // Use distinct CanonicalPath instances with the same value. 
+ runner.schedule( + file, + "{ a: 1 }".to_string(), + DocVersion::new(1), + false, + vec![], + ); + runner.schedule( + file, + "{ a: 2 }".to_string(), + DocVersion::new(2), + false, + vec![], + ); + + let result = runner + .results() + .recv_timeout(Duration::from_secs(3)) + .expect("should receive result"); + + assert_eq!( + result.params.uri, + test_path("test").to_uri().expect("valid URI") + ); + + // Should not get more results immediately (first request was debounced). + runner + .results() + .recv_timeout(Duration::from_millis(200)) + .expect_err("should not receive more results after debouncing"); + } + + #[test] + fn test_syntax_errors() { + let config = test_config(); + let documents = Arc::clone(&config.documents); + let runner = AsyncDiagnostics::new(config); + + let path = test_path("test"); + let file = documents.intern(&path); + let text = "{ a: }".to_string(); + let version = DocVersion::new(1); + + runner.schedule(file, text, version, false, vec![]); + + let result = runner + .results() + .recv_timeout(Duration::from_secs(2)) + .expect("should receive result"); + + assert!(!result.params.diagnostics.is_empty()); + // Check that at least one diagnostic mentions "expected" + let expected_msgs: Vec<_> = result + .params + .diagnostics + .iter() + .filter(|d| d.message.contains("expected")) + .collect(); + assert!( + !expected_msgs.is_empty(), + "Expected diagnostic message containing 'expected', got: {:?}", + result + .params + .diagnostics + .iter() + .map(|d| &d.message) + .collect::>() + ); + } + + #[test] + fn test_collect_latest_requests_keeps_newest_per_file() { + let paths = PathStore::new(); + let file_a = paths.intern(&test_path("a")); + let file_b = paths.intern(&test_path("b")); + let (sender, receiver) = crossbeam_channel::unbounded(); + + let first = test_request(file_a, 1, "{ a: 1 }"); + sender + .send(test_request(file_b, 2, "{ b: 1 }")) + .expect("send b"); + sender + .send(test_request(file_a, 3, "{ a: 2 }")) + .expect("send latest 
a"); + + let coalesced = AsyncDiagnostics::collect_latest_requests(first, &receiver); + assert_eq!(coalesced.len(), 2); + + let sequences: FxHashMap<_, _> = coalesced + .into_iter() + .map(|request| (request.file_id, request.sequence)) + .collect(); + assert_eq!(sequences.get(&file_a), Some(&3)); + assert_eq!(sequences.get(&file_b), Some(&2)); + } + + fn test_request(file_id: FileId, sequence: u64, text: &str) -> DiagnosticsRequest { + DiagnosticsRequest { + file_id, + text: text.to_string(), + version: DocVersion::new(i32::try_from(sequence).expect("sequence fits i32")), + enable_lint: false, + import_roots: vec![], + sequence, + } + } +} diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs new file mode 100644 index 00000000..257969f3 --- /dev/null +++ b/crates/jrsonnet-lsp/src/config.rs @@ -0,0 +1,814 @@ +//! LSP server configuration. +//! +//! Handles configuration options passed via LSP initialization or +//! workspace/didChangeConfiguration notifications. +//! +//! Configuration is compatible with the Go [grafana/jsonnet-language-server](https://github.com/grafana/jsonnet-language-server). + +use std::{collections::HashMap, fmt, path::PathBuf}; + +// Re-export config types from handlers crate +pub use jrsonnet_lsp_handlers::{ + AnonymousFunctionReturnHintsMode, CallArgumentHintsMode, CodeActionConfig, + ComprehensionHintsMode, DestructuringHintsMode, FormattingCommentStyle, FormattingConfig, + FormattingStringStyle, FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, + ObjectMemberHintsMode, RemoveUnusedCommentsMode, RemoveUnusedMode, +}; +use serde::{Deserialize, Serialize}; + +/// Tanka import-root resolution mode. +/// +/// Accepts string values `"false"`, `"auto"`, `"true"` and also boolean values +/// (`false`/`true`) for convenience. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)] +#[serde(rename_all = "lowercase")] +pub enum ResolvePathsWithTankaMode { + /// Disable Tanka-based path resolution. 
+ False, + /// Resolve using Tanka conventions only when a Tanka root marker is found. + Auto, + /// Force Tanka path resolution behavior even when no marker is found. + True, +} + +impl Default for ResolvePathsWithTankaMode { + fn default() -> Self { + Self::Auto + } +} + +impl ResolvePathsWithTankaMode { + #[must_use] + pub const fn is_enabled(self) -> bool { + !matches!(self, Self::False) + } + + #[must_use] + pub const fn is_forced(self) -> bool { + matches!(self, Self::True) + } +} + +impl fmt::Display for ResolvePathsWithTankaMode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::False => write!(f, "false"), + Self::Auto => write!(f, "auto"), + Self::True => write!(f, "true"), + } + } +} + +impl<'de> Deserialize<'de> for ResolvePathsWithTankaMode { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + #[derive(Deserialize)] + #[serde(untagged)] + enum Repr { + Bool(bool), + String(String), + } + + match Repr::deserialize(deserializer)? { + Repr::Bool(value) => Ok(if value { Self::True } else { Self::False }), + Repr::String(value) => { + if value.eq_ignore_ascii_case("false") { + Ok(Self::False) + } else if value.eq_ignore_ascii_case("auto") { + Ok(Self::Auto) + } else if value.eq_ignore_ascii_case("true") { + Ok(Self::True) + } else { + Err(serde::de::Error::custom( + "resolve_paths_with_tanka must be one of: false, auto, true", + )) + } + } + } + } +} + +/// Server configuration options. +/// +/// These can be passed via: +/// - `initializationOptions` in the initialize request +/// - `workspace/didChangeConfiguration` notification +/// +/// Field names use `snake_case` internally but accept camelCase from JSON +/// for compatibility with VS Code settings. +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(default)] +pub struct ServerConfig { + /// Library search paths for imports (equivalent to jsonnet -J flag). 
+ /// Paths are searched in order when resolving imports. + /// Right-most paths take precedence. + #[serde(alias = "jpath")] + pub jpath: Vec, + + /// External variables (equivalent to jsonnet --ext-str flag). + /// Maps variable names to string values. + #[serde(alias = "extVars", alias = "ext_vars")] + pub ext_vars: HashMap, + + /// External code variables (equivalent to jsonnet --ext-code flag). + /// Maps variable names to Jsonnet code that will be evaluated. + #[serde(alias = "extCode", alias = "ext_code")] + pub ext_code: HashMap, + + /// Enable evaluation-based diagnostics. + /// When enabled, the server will evaluate Jsonnet files and report runtime errors. + /// This can be slow for large files and may produce false positives. + #[serde(alias = "enableEvalDiagnostics", alias = "eval")] + pub enable_eval_diagnostics: bool, + + /// Enable linting diagnostics. + #[serde(alias = "enableLintDiagnostics", alias = "lint")] + pub enable_lint_diagnostics: bool, + + /// Resolve import paths using Tanka conventions. + /// - `false`: disabled + /// - `auto` (default): enabled only when a Tanka root marker exists + /// - `true`: force-enabled even without root markers + #[serde(alias = "resolvePathsWithTanka", alias = "tankaMode")] + pub resolve_paths_with_tanka: ResolvePathsWithTankaMode, + + /// Formatting options. + #[serde(default)] + pub formatting: FormattingConfig, + + /// Code action options. + #[serde(default, alias = "codeActions")] + pub code_actions: CodeActionConfig, + + /// Inlay hint options. + #[serde(default, alias = "inlayHints")] + pub inlay_hints: InlayHintsConfig, + + /// Log level for the server (error, warn, info, debug). 
+ #[serde(alias = "logLevel", alias = "log_level")] + pub log_level: Option, +} + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(default)] +struct ServerConfigPatch { + #[serde(alias = "jpath")] + jpath: Option>, + #[serde(alias = "extVars", alias = "ext_vars")] + ext_vars: Option>, + #[serde(alias = "extCode", alias = "ext_code")] + ext_code: Option>, + #[serde(alias = "enableEvalDiagnostics", alias = "eval")] + enable_eval_diagnostics: Option, + #[serde(alias = "enableLintDiagnostics", alias = "lint")] + enable_lint_diagnostics: Option, + #[serde(alias = "resolvePathsWithTanka", alias = "tankaMode")] + resolve_paths_with_tanka: Option, + formatting: Option, + #[serde(rename = "codeActions")] + code_actions: Option, + #[serde(rename = "inlayHints")] + inlay_hints: Option, + #[serde(alias = "logLevel", alias = "log_level")] + log_level: Option, +} + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(default)] +struct FormattingConfigPatch { + #[serde(alias = "Indent")] + indent: Option, + #[serde(alias = "maxBlankLines", alias = "MaxBlankLines")] + max_blank_lines: Option, + #[serde(alias = "stringStyle", alias = "StringStyle")] + string_style: Option, + #[serde(alias = "commentStyle", alias = "CommentStyle")] + comment_style: Option, + #[serde(alias = "padArrays", alias = "PadArrays")] + pad_arrays: Option, + #[serde(alias = "padObjects", alias = "PadObjects")] + pad_objects: Option, + #[serde(alias = "prettyFieldNames", alias = "PrettyFieldNames")] + pretty_field_names: Option, +} + +impl ServerConfigPatch { + fn is_empty(&self) -> bool { + self.jpath.is_none() + && self.ext_vars.is_none() + && self.ext_code.is_none() + && self.enable_eval_diagnostics.is_none() + && self.enable_lint_diagnostics.is_none() + && self.resolve_paths_with_tanka.is_none() + && self.formatting.is_none() + && self.code_actions.is_none() + && self.inlay_hints.is_none() + && self.log_level.is_none() + } + + fn apply(self, config: &mut ServerConfig) { + if let Some(jpath) = 
self.jpath { + config.jpath = jpath; + } + if let Some(ext_vars) = self.ext_vars { + config.ext_vars = ext_vars; + } + if let Some(ext_code) = self.ext_code { + config.ext_code = ext_code; + } + if let Some(enable_eval_diagnostics) = self.enable_eval_diagnostics { + config.enable_eval_diagnostics = enable_eval_diagnostics; + } + if let Some(enable_lint_diagnostics) = self.enable_lint_diagnostics { + config.enable_lint_diagnostics = enable_lint_diagnostics; + } + if let Some(resolve_paths_with_tanka) = self.resolve_paths_with_tanka { + config.resolve_paths_with_tanka = resolve_paths_with_tanka; + } + if let Some(formatting) = self.formatting { + config.merge_formatting(formatting); + } + if let Some(code_actions) = self.code_actions { + config.code_actions = code_actions; + } + if let Some(inlay_hints) = self.inlay_hints { + config.inlay_hints = inlay_hints; + } + if let Some(log_level) = self.log_level { + config.log_level = log_level.as_str().map(ToString::to_string); + } + } +} + +impl ServerConfig { + /// Create a new default configuration. + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Parse configuration from LSP initialization options. + #[must_use] + pub fn from_initialization_options(value: Option) -> Self { + let mut config = Self::default(); + if let Some(value) = value { + if let Ok(patch) = serde_json::from_value::(value) { + patch.apply(&mut config); + } + } + config + } + + /// Update configuration from a didChangeConfiguration notification. + /// Returns true if the configuration was updated. + pub fn update_from_settings(&mut self, settings: serde_json::Value) -> bool { + let Ok(patch) = serde_json::from_value::(settings) else { + return false; + }; + if patch.is_empty() { + return false; + } + patch.apply(self); + true + } + + /// Merge another configuration into this one. + /// Values from `other` take precedence. 
+ pub fn merge(&mut self, other: ServerConfig) { + if !other.jpath.is_empty() { + self.jpath = other.jpath; + } + if !other.ext_vars.is_empty() { + self.ext_vars.extend(other.ext_vars); + } + if !other.ext_code.is_empty() { + self.ext_code.extend(other.ext_code); + } + if other.enable_eval_diagnostics { + self.enable_eval_diagnostics = true; + } + if other.enable_lint_diagnostics { + self.enable_lint_diagnostics = true; + } + if other.resolve_paths_with_tanka != ResolvePathsWithTankaMode::default() { + self.resolve_paths_with_tanka = other.resolve_paths_with_tanka; + } + if other.log_level.is_some() { + self.log_level = other.log_level; + } + self.formatting = other.formatting; + if other.code_actions != CodeActionConfig::default() { + self.code_actions = other.code_actions; + } + if other.inlay_hints != InlayHintsConfig::default() { + self.inlay_hints = other.inlay_hints; + } + } + + /// Merge formatting configuration. + fn merge_formatting(&mut self, other: FormattingConfigPatch) { + if let Some(indent) = other.indent { + self.formatting.indent = indent; + } + if let Some(max_blank_lines) = other.max_blank_lines { + self.formatting.max_blank_lines = max_blank_lines; + } + if let Some(string_style) = other.string_style { + self.formatting.string_style = string_style; + } + if let Some(comment_style) = other.comment_style { + self.formatting.comment_style = comment_style; + } + if let Some(pad_arrays) = other.pad_arrays { + self.formatting.pad_arrays = pad_arrays; + } + if let Some(pad_objects) = other.pad_objects { + self.formatting.pad_objects = pad_objects; + } + if let Some(pretty_field_names) = other.pretty_field_names { + self.formatting.pretty_field_names = pretty_field_names; + } + } + + /// Get all library paths for import resolution. + /// Returns jpath entries. + #[must_use] + pub fn library_paths(&self) -> &[PathBuf] { + &self.jpath + } + + /// Check if a jpath is configured. 
+ #[must_use] + pub fn has_jpath(&self) -> bool { + !self.jpath.is_empty() + } + + /// Check if evaluation diagnostics are enabled. + #[must_use] + pub fn eval_diagnostics_enabled(&self) -> bool { + self.enable_eval_diagnostics + } + + /// Check if lint diagnostics are enabled. + #[must_use] + pub fn lint_diagnostics_enabled(&self) -> bool { + self.enable_lint_diagnostics + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use rstest::rstest; + + use super::*; + + #[derive(Debug, PartialEq, Eq)] + struct SettingsUpdate { + updated: bool, + config: ServerConfig, + } + + fn apply_settings(config: ServerConfig, settings: serde_json::Value) -> SettingsUpdate { + let mut config = config; + let updated = config.update_from_settings(settings); + SettingsUpdate { updated, config } + } + + #[test] + fn test_default_config() { + assert_eq!( + ServerConfig::new(), + ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::Auto, + code_actions: CodeActionConfig::default(), + inlay_hints: InlayHintsConfig::default(), + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_parse_from_json_camel_case() { + let json = serde_json::json!({ + "jpath": ["/usr/share/jsonnet", "./lib"], + "extVars": { + "environment": "production", + "version": "1.0.0" + }, + "extCode": { + "config": "{ key: 'value' }" + }, + "enableEvalDiagnostics": true + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + jpath: vec![PathBuf::from("/usr/share/jsonnet"), PathBuf::from("./lib")], + ext_vars: HashMap::from([ + ("environment".to_string(), "production".to_string()), + ("version".to_string(), "1.0.0".to_string()), + ]), + ext_code: HashMap::from([("config".to_string(), "{ key: 'value' }".to_string())]), + enable_eval_diagnostics: true, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_parse_from_json_snake_case() { + let json = serde_json::json!({ + "jpath": ["/usr/share/jsonnet"], + "ext_vars": { + "env": 
"dev" + }, + "resolve_paths_with_tanka": true + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + jpath: vec![PathBuf::from("/usr/share/jsonnet")], + ext_vars: HashMap::from([("env".to_string(), "dev".to_string())]), + resolve_paths_with_tanka: ResolvePathsWithTankaMode::True, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_parse_tanka_mode_string() { + let json = serde_json::json!({ + "resolvePathsWithTanka": "auto" + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::Auto, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_parse_tanka_mode_legacy_bool_false() { + let json = serde_json::json!({ + "tankaMode": false + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::False, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_parse_empty_options() { + assert_eq!( + ServerConfig::from_initialization_options(None), + ServerConfig::default() + ); + } + + #[test] + fn test_merge_configs() { + let mut base = ServerConfig { + jpath: vec![PathBuf::from("/base/path")], + ext_vars: HashMap::from([("key1".to_string(), "value1".to_string())]), + ..ServerConfig::default() + }; + let other = ServerConfig { + jpath: vec![PathBuf::from("/other/path")], + ext_vars: HashMap::from([("key2".to_string(), "value2".to_string())]), + enable_eval_diagnostics: true, + ..ServerConfig::default() + }; + + base.merge(other); + + assert_eq!( + base, + ServerConfig { + jpath: vec![PathBuf::from("/other/path")], + ext_vars: HashMap::from([ + ("key1".to_string(), "value1".to_string()), + ("key2".to_string(), "value2".to_string()), + ]), + enable_eval_diagnostics: true, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_update_from_settings() { + let settings = serde_json::json!({ + "jpath": ["/new/path"], + 
"enableEvalDiagnostics": true, + "logLevel": "debug" + }); + + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + jpath: vec![PathBuf::from("/new/path")], + enable_eval_diagnostics: true, + log_level: Some("debug".to_string()), + ..ServerConfig::default() + } + } + ); + } + + #[test] + fn test_update_from_settings_can_disable_eval_diagnostics() { + let settings = serde_json::json!({ + "enableEvalDiagnostics": false + }); + + assert_eq!( + apply_settings( + ServerConfig { + enable_eval_diagnostics: true, + ..ServerConfig::default() + }, + settings + ), + SettingsUpdate { + updated: true, + config: ServerConfig::default() + } + ); + } + + #[test] + fn test_update_from_settings_updates_tanka_mode_to_true() { + let settings = serde_json::json!({ + "resolvePathsWithTanka": "true" + }); + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::True, + ..ServerConfig::default() + } + } + ); + } + + #[test] + fn test_update_from_settings_updates_tanka_mode_to_false() { + let settings = serde_json::json!({ + "resolvePathsWithTanka": false + }); + assert_eq!( + apply_settings( + ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::True, + ..ServerConfig::default() + }, + settings + ), + SettingsUpdate { + updated: true, + config: ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::False, + ..ServerConfig::default() + } + } + ); + } + + #[test] + fn test_code_action_config_from_initialization_options() { + let json = serde_json::json!({ + "codeActions": { + "removeUnused": "nonImportBindings", + "removeUnusedComments": "above" + } + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + code_actions: CodeActionConfig { + remove_unused: RemoveUnusedMode::NonImportBindings, + remove_unused_comments: 
RemoveUnusedCommentsMode::Above, + }, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_update_from_settings_updates_code_actions_config() { + let settings = serde_json::json!({ + "codeActions": { + "removeUnused": "nonImportBindings", + "removeUnusedComments": "below" + } + }); + + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + code_actions: CodeActionConfig { + remove_unused: RemoveUnusedMode::NonImportBindings, + remove_unused_comments: RemoveUnusedCommentsMode::Below, + }, + ..ServerConfig::default() + } + } + ); + } + + #[test] + fn test_inlay_hints_config_from_initialization_options() { + let json = serde_json::json!({ + "inlayHints": { + "local": "off", + "objectMembers": "fields" + } + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + inlay_hints: InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::All, + object_members: ObjectMemberHintsMode::Fields, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, + }, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_update_from_settings_updates_inlay_hints_config() { + let settings = serde_json::json!({ + "inlayHints": { + "local": "functions", + "objectMembers": "methods" + } + }); + + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + inlay_hints: InlayHintsConfig { + local: LocalHintsMode::Functions, + object_local: LocalHintsMode::All, + object_members: ObjectMemberHintsMode::Methods, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + 
comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, + }, + ..ServerConfig::default() + } + } + ); + } + + #[test] + fn test_formatting_config() { + let json = serde_json::json!({ + "formatting": { + "indent": 4, + "string_style": "double", + "pad_arrays": true + } + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + formatting: FormattingConfig { + indent: 4, + string_style: FormattingStringStyle::Double, + pad_arrays: true, + ..FormattingConfig::default() + }, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_formatting_config_accepts_legacy_go_jsonnet_aliases() { + let json = serde_json::json!({ + "formatting": { + "Indent": 4, + "MaxBlankLines": 1, + "StringStyle": "single", + "CommentStyle": "hash", + "PadArrays": true, + "PadObjects": false, + "PrettyFieldNames": false + } + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + formatting: FormattingConfig { + indent: 4, + max_blank_lines: 1, + string_style: FormattingStringStyle::Single, + comment_style: FormattingCommentStyle::Hash, + pad_arrays: true, + pad_objects: false, + pretty_field_names: false, + ..FormattingConfig::default() + }, + ..ServerConfig::default() + } + ); + } + + #[rstest] + #[case( + serde_json::json!({"formatting":{"stringStyle":"double","padObjects":false}}), + FormattingConfig { + string_style: FormattingStringStyle::Double, + pad_objects: false, + ..FormattingConfig::default() + } + )] + #[case( + serde_json::json!({"formatting":{"StringStyle":"single","PadObjects":false}}), + FormattingConfig { + string_style: FormattingStringStyle::Single, + pad_objects: false, + ..FormattingConfig::default() + } + )] + fn test_update_from_settings_accepts_formatting_aliases( + #[case] settings: serde_json::Value, + #[case] expected_formatting: FormattingConfig, + ) { + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: 
true, + config: ServerConfig { + formatting: expected_formatting, + ..ServerConfig::default() + } + } + ); + } + + #[test] + fn test_formatting_rejects_out_of_range_indent_in_initialization_options() { + let json = serde_json::json!({ + "formatting": { + "indent": 300 + } + }); + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig::default() + ); + } + + #[test] + fn test_formatting_rejects_out_of_range_indent_in_settings_update() { + let settings = serde_json::json!({ + "formatting": { + "indent": 300 + } + }); + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: false, + config: ServerConfig::default() + } + ); + } +} diff --git a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs new file mode 100644 index 00000000..a1a60d58 --- /dev/null +++ b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs @@ -0,0 +1,648 @@ +//! Diagnostics handler for publishing parse errors, lint warnings, and evaluation errors. + +use jrsonnet_lsp_check::{lint, type_check}; +use jrsonnet_lsp_document::{CanonicalPath, Document, LineIndex, SyntaxError}; +use jrsonnet_lsp_import::ImportOccurrence; +use jrsonnet_lsp_inference::TypeAnalysis; +use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString, Position, Range}; + +use crate::analysis::{EvalDiagnostic, Evaluator}; + +/// Convert a syntax error to an LSP diagnostic. 
+fn syntax_error_to_diagnostic( + error: &SyntaxError, + line_index: &LineIndex, + text: &str, +) -> Diagnostic { + let range = error.range; + + // Convert rowan TextRange to LSP Range + let start_pos = line_index + .position(range.start().into(), text) + .unwrap_or_default(); + let end_pos = line_index + .position(range.end().into(), text) + .unwrap_or_default(); + + Diagnostic { + range: Range { + start: start_pos.into(), + end: end_pos.into(), + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: error.to_string(), + related_information: None, + tags: None, + data: None, + } +} + +fn unresolved_import_to_diagnostic( + occurrence: &ImportOccurrence, + line_index: &LineIndex, + text: &str, +) -> Option { + if occurrence.entry.resolved_path.is_some() { + return None; + } + + let start = line_index.position(occurrence.import_range.start().into(), text)?; + let end = line_index.position(occurrence.import_range.end().into(), text)?; + + Some(Diagnostic { + range: Range { + start: start.into(), + end: end.into(), + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("unresolved-import".to_string())), + code_description: None, + source: Some("jrsonnet-import".to_string()), + message: format!("unable to resolve import: {}", occurrence.entry.import_path), + related_information: None, + tags: None, + data: None, + }) +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct OrderedPosition(Position); + +impl Ord for OrderedPosition { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + (self.0.line, self.0.character).cmp(&(other.0.line, other.0.character)) + } +} + +impl PartialOrd for OrderedPosition { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +fn ranges_overlap(a: &Range, b: &Range) -> bool { + OrderedPosition(a.start) <= OrderedPosition(b.end) + && 
OrderedPosition(b.start) <= OrderedPosition(a.end) +} + +fn should_suppress_eval_diagnostic(eval_diag: &EvalDiagnostic, lint_type_ranges: &[Range]) -> bool { + if !eval_diag.is_type_like() { + return false; + } + if lint_type_ranges.is_empty() { + return false; + } + let eval_range = &eval_diag.diagnostic.range; + if *eval_range == Range::default() { + return true; + } + + lint_type_ranges + .iter() + .any(|range| ranges_overlap(range, eval_range)) +} + +/// Compute diagnostics for a document. +/// +/// # Arguments +/// * `document` - The document to check +/// * `path` - The canonical path of the document (needed for evaluation) +/// * `enable_lint` - Whether to include lint warnings +/// * `evaluator` - Optional evaluator for runtime error detection +/// * `uri` - The URI of the document (needed for lint related information) +/// * `analysis` - Precomputed type analysis used by lint type checks +/// * `import_occurrences` - Parsed import occurrences with source ranges +pub fn compute_diagnostics( + document: &Document, + path: &CanonicalPath, + enable_lint: bool, + evaluator: Option<&Evaluator>, + uri: &lsp_types::Uri, + analysis: &TypeAnalysis, + import_occurrences: &[ImportOccurrence], +) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + let errors = document.errors(); + + let mut diagnostics: Vec = errors + .iter() + .map(|e| syntax_error_to_diagnostic(e, line_index, text)) + .collect(); + + if errors.is_empty() { + diagnostics.extend(import_occurrences.iter().filter_map(|occurrence| { + unresolved_import_to_diagnostic(occurrence, line_index, text) + })); + } + + // Add lint diagnostics if enabled and the document parsed successfully + if enable_lint && errors.is_empty() { + let lint_config = lint::LintConfig::all_except_type_errors(); + let lint_diagnostics = lint::lint(document, analysis, &lint_config, uri); + diagnostics.extend(lint_diagnostics); + + let type_check_config = type_check::TypeCheckConfig::all(); + let 
type_errors = type_check::check_types(document, analysis, &type_check_config); + let type_error_diagnostics: Vec = type_errors + .into_iter() + .map(|error| error.to_diagnostic(line_index, text, analysis)) + .collect(); + let lint_type_ranges: Vec = type_error_diagnostics + .iter() + .map(|diag| diag.range) + .collect(); + diagnostics.extend(type_error_diagnostics); + + // Add evaluation diagnostics if enabled and the document parsed successfully + if let Some(eval) = evaluator.filter(|_| errors.is_empty()) { + if let Some(eval_diag) = eval.evaluate(path, text, line_index) { + if !should_suppress_eval_diagnostic(&eval_diag, &lint_type_ranges) { + diagnostics.push(eval_diag.diagnostic); + } + } + } + return diagnostics; + } + + // Add evaluation diagnostics if enabled and the document parsed successfully + if let Some(eval) = evaluator.filter(|_| errors.is_empty()) { + if let Some(eval_diag) = eval.evaluate(path, text, line_index) { + diagnostics.push(eval_diag.diagnostic); + } + } + + diagnostics +} + +/// Create a `PublishDiagnostics` notification. 
+/// +/// # Arguments +/// * `path` - The canonical path of the document +/// * `document` - The document to check +/// * `enable_lint` - Whether to include lint warnings +/// * `evaluator` - Optional evaluator for runtime error detection +/// * `analysis` - Precomputed type analysis used by lint type checks +/// * `import_occurrences` - Parsed import occurrences with source ranges +pub fn publish_diagnostics_params( + path: &CanonicalPath, + document: &Document, + enable_lint: bool, + evaluator: Option<&Evaluator>, + analysis: &TypeAnalysis, + import_occurrences: &[ImportOccurrence], +) -> Option { + let uri = path.to_uri().ok()?; + let diagnostics = compute_diagnostics( + document, + path, + enable_lint, + evaluator, + &uri, + analysis, + import_occurrences, + ); + + Some(lsp_types::PublishDiagnosticsParams { + uri, + diagnostics, + version: Some(document.version().0), + }) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_import::{parse_document_import_occurrences, ImportOccurrence}; + use jrsonnet_lsp_inference::TypeAnalysis; + use lsp_types::{Position, Uri}; + + use super::*; + use crate::analysis::EvalConfig; + + fn test_uri() -> Uri { + "file:///test.jsonnet".parse().expect("expected success") + } + + fn test_path() -> CanonicalPath { + CanonicalPath::new("/test.jsonnet".into()) + } + + fn test_evaluator() -> Evaluator { + Evaluator::new(&EvalConfig::default()) + } + + fn diagnostics_for( + doc: &Document, + enable_lint: bool, + evaluator: Option<&Evaluator>, + ) -> Vec { + diagnostics_for_with_occurrences(doc, enable_lint, evaluator, &[]) + } + + fn diagnostics_for_with_occurrences( + doc: &Document, + enable_lint: bool, + evaluator: Option<&Evaluator>, + import_occurrences: &[ImportOccurrence], + ) -> Vec { + let analysis = TypeAnalysis::analyze(doc); + compute_diagnostics( + doc, + &test_path(), + enable_lint, + evaluator, + &test_uri(), + &analysis, + import_occurrences, + ) + } + + fn parse_occurrences(doc: 
&Document) -> Vec { + let path = test_path(); + parse_document_import_occurrences(doc, &|import| { + if import == "exists.libsonnet" { + let mut resolved = path.as_path().parent()?.to_path_buf(); + resolved.push(import); + return Some(CanonicalPath::new(resolved)); + } + None + }) + } + + #[test] + fn test_valid_document_no_diagnostics() { + let doc = Document::new(r#"{ hello: "world" }"#.to_string(), DocVersion::new(1)); + assert_eq!(diagnostics_for(&doc, false, None), vec![]); + } + + #[test] + fn test_syntax_error_produces_diagnostic() { + let doc = Document::new("{ hello: }".to_string(), DocVersion::new(1)); + assert_eq!( + diagnostics_for(&doc, false, None), + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 9 + }, + end: Position { + line: 0, + character: 9 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected expression".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_diagnostic_position() { + // Error is at the closing brace position (line 2, char 0) + // "{\n a: \n}" - missing value after 'a:' + let doc = Document::new("{\n a: \n}".to_string(), DocVersion::new(1)); + assert_eq!( + diagnostics_for(&doc, false, None), + vec![Diagnostic { + range: Range { + start: Position { + line: 2, + character: 0 + }, + end: Position { + line: 2, + character: 0 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected expression".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_lint_diagnostics_when_enabled() { + // Has unused variable 'x' + let doc = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + assert_eq!( + 
diagnostics_for(&doc, true, None), + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 6 + }, + end: Position { + line: 0, + character: 7 + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("unused-variable".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable: `x`; prefix with `_` to silence this warning" + .to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_lint_diagnostics_when_disabled() { + // Has unused variable 'x' but lint is disabled + let doc = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + assert_eq!(diagnostics_for(&doc, false, None), vec![]); + } + + #[test] + fn test_type_diagnostics_when_enabled() { + let doc = Document::new(r#""str" + {}"#.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze(&doc); + let lint_diags = lint::lint(&doc, &analysis, &lint::LintConfig::all(), &test_uri()); + assert_eq!( + lint_diags, + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 0 + }, + end: Position { + line: 0, + character: 10 + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("type-error".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got (string, {})".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + assert_eq!( + diagnostics_for(&doc, true, None), + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 0 + }, + end: Position { + line: 0, + character: 10 + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("type-error".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "operator 
`+` requires matching types (number+number, string+string, array+array, or object+object), got (string, {})".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_type_diagnostics_when_disabled() { + let doc = Document::new(r#""str" + {}"#.to_string(), DocVersion::new(1)); + assert_eq!(diagnostics_for(&doc, false, None), vec![]); + } + + #[test] + fn test_lint_not_run_on_syntax_errors() { + // Has both syntax error and what would be unused variable + let doc = Document::new("local x = 1; {".to_string(), DocVersion::new(1)); + // Should only have syntax errors, not lint warnings + assert_eq!( + diagnostics_for(&doc, true, None), + vec![ + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 14 + }, + end: Position { + line: 0, + character: 14 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected field name".to_string(), + related_information: None, + tags: None, + data: None, + }, + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 14 + }, + end: Position { + line: 0, + character: 14 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected PLUS, L_PAREN, COLON, COLONCOLON or COLONCOLONCOLON" + .to_string(), + related_information: None, + tags: None, + data: None, + }, + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 14 + }, + end: Position { + line: 0, + character: 14 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected expression".to_string(), + related_information: None, + tags: None, + 
data: None, + }, + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 14 + }, + end: Position { + line: 0, + character: 14 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected R_BRACE, SEMI, COMMA, IF_KW or FOR_KW".to_string(), + related_information: None, + tags: None, + data: None, + }, + ] + ); + } + + #[test] + fn test_eval_diagnostics_when_enabled() { + // Has runtime error (undefined variable) + let doc = Document::new("undefined_var".to_string(), DocVersion::new(1)); + let eval = test_evaluator(); + let diagnostics = diagnostics_for(&doc, false, Some(&eval)); + assert_eq!( + diagnostics, + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 13, + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("eval-error".to_string())), + code_description: None, + source: Some("jrsonnet-eval".to_string()), + message: "local is not defined: undefined_var".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_eval_diagnostics_not_run_on_syntax_errors() { + // Has both syntax error and what would be runtime error + let doc = Document::new("undefined_var {".to_string(), DocVersion::new(1)); + let eval = test_evaluator(); + let diagnostics = diagnostics_for(&doc, false, Some(&eval)); + // Should only have syntax errors, not eval errors + assert!(!diagnostics.is_empty()); + assert!(diagnostics.iter().all(|d| d + .code + .as_ref() + .is_some_and(|c| matches!(c, NumberOrString::String(s) if s == "syntax-error")))); + } + + #[test] + fn test_eval_type_error_suppressed_when_lint_type_error_exists() { + let doc = Document::new("std.length(1)".to_string(), DocVersion::new(1)); + let eval = test_evaluator(); + let diagnostics = 
diagnostics_for(&doc, true, Some(&eval)); + + assert_eq!( + diagnostics.len(), + 1, + "expected lint type diagnostic to suppress duplicate eval type diagnostic" + ); + assert_eq!( + diagnostics[0].source.as_deref(), + Some("jrsonnet-lint"), + "expected remaining diagnostic to come from lint" + ); + assert!( + diagnostics + .iter() + .all(|diag| diag.source.as_deref() != Some("jrsonnet-eval")), + "eval diagnostic should be suppressed when equivalent lint type diagnostic exists" + ); + } + + #[test] + fn test_unresolved_import_reports_diagnostic() { + let doc = Document::new( + r#"local lib = import "missing.libsonnet"; lib"#.to_string(), + DocVersion::new(1), + ); + let occurrences = parse_occurrences(&doc); + let diagnostics = diagnostics_for_with_occurrences(&doc, false, None, &occurrences); + + assert_eq!( + diagnostics, + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 19 + }, + end: Position { + line: 0, + character: 38 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("unresolved-import".to_string())), + code_description: None, + source: Some("jrsonnet-import".to_string()), + message: "unable to resolve import: missing.libsonnet".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_unresolved_import_not_reported_on_syntax_error() { + let doc = Document::new( + r#"local lib = import "missing.libsonnet"; {"#.to_string(), + DocVersion::new(1), + ); + let occurrences = parse_occurrences(&doc); + let diagnostics = diagnostics_for_with_occurrences(&doc, false, None, &occurrences); + + assert!(diagnostics.iter().all(|diag| { + matches!( + diag.code.as_ref(), + Some(NumberOrString::String(code)) if code == "syntax-error" + ) + })); + } +} diff --git a/crates/jrsonnet-lsp/src/handlers/mod.rs b/crates/jrsonnet-lsp/src/handlers/mod.rs new file mode 100644 index 00000000..7c763df5 --- /dev/null +++ b/crates/jrsonnet-lsp/src/handlers/mod.rs @@ -0,0 
+1,8 @@ +//! LSP request and notification handlers. +//! +//! Most handlers are provided by the `jrsonnet-lsp-handlers` crate. +//! This module contains handlers that depend on the evaluator. + +pub mod diagnostics; + +pub use diagnostics::{compute_diagnostics, publish_diagnostics_params}; diff --git a/crates/jrsonnet-lsp/src/lib.rs b/crates/jrsonnet-lsp/src/lib.rs new file mode 100644 index 00000000..949c4a54 --- /dev/null +++ b/crates/jrsonnet-lsp/src/lib.rs @@ -0,0 +1,31 @@ +//! Jsonnet Language Server Protocol implementation. +//! +//! This crate provides an LSP server for Jsonnet files, offering features like: +//! - Syntax error diagnostics +//! - Go to definition (local bindings and imports) +//! - Hover information (stdlib functions and local definitions) +//! - Document symbols (outline) +//! - Completions (stdlib, local variables, object fields, imports) +//! - Formatting (via jrsonnet-fmt) +//! - Find references (local and cross-file) +//! - Rename (local symbols) +//! - Signature help +//! - Semantic tokens +//! +//! # Configuration +//! +//! The server accepts configuration via: +//! - `initializationOptions` in the initialize request +//! - `workspace/didChangeConfiguration` notifications +//! +//! See [`config::ServerConfig`] for available options. 
+ +pub mod analysis; +pub mod async_diagnostics; +pub mod config; +pub mod handlers; +mod protocol; +pub mod server; + +pub use config::ServerConfig; +pub use server::run_stdio; diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs new file mode 100644 index 00000000..9e4fa34e --- /dev/null +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -0,0 +1,393 @@ +use std::marker::PhantomData; + +use anyhow::Result; +use crossbeam_channel::Sender; +use lsp_server::{Message, ReqQueue, RequestId, Response}; +use serde::Serialize; + +use super::request_error::RequestError; + +#[derive(Debug, Clone, PartialEq, Eq)] +struct IncomingRequestMeta { + method: String, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct OutgoingRequestMeta { + pub(crate) method: String, +} + +#[derive(Debug)] +pub(crate) struct InflightRequests { + queue: ReqQueue, + sender: Sender, +} + +#[derive(Debug)] +pub(crate) struct IncomingRequest { + id: RequestId, + _marker: PhantomData R>, +} + +#[derive(Debug)] +pub(crate) struct UnknownIncomingRequest { + id: RequestId, + method: String, +} + +impl IncomingRequest { + #[must_use] + fn new(id: RequestId) -> Self { + Self { + id, + _marker: PhantomData, + } + } + + #[must_use] + pub(crate) fn into_id(self) -> RequestId { + self.id + } +} + +impl UnknownIncomingRequest { + #[must_use] + pub(crate) fn method(&self) -> &str { + &self.method + } +} + +impl InflightRequests { + #[must_use] + pub(crate) fn new(sender: Sender) -> Self { + Self { + queue: ReqQueue::default(), + sender, + } + } + + fn register_incoming(&mut self, id: RequestId, method: &str) { + self.queue.incoming.register( + id, + IncomingRequestMeta { + method: method.to_owned(), + }, + ); + } + + pub(crate) fn begin(&mut self, id: RequestId) -> IncomingRequest + where + R: lsp_types::request::Request, + { + self.register_incoming(id.clone(), R::METHOD); + IncomingRequest::new(id) + } + + pub(crate) fn 
begin_unknown(&mut self, id: RequestId, method: &str) -> UnknownIncomingRequest { + self.register_incoming(id.clone(), method); + UnknownIncomingRequest { + id, + method: method.to_owned(), + } + } + + pub(crate) fn send_outgoing_request(&mut self, params: R::Params) -> Result<()> + where + R: lsp_types::request::Request, + R::Params: Serialize, + { + let request = self.queue.outgoing.register( + R::METHOD.to_owned(), + params, + OutgoingRequestMeta { + method: R::METHOD.to_owned(), + }, + ); + self.sender.send(Message::Request(request))?; + Ok(()) + } + + pub(crate) fn send_inflight_response(&mut self, response: Response) -> Result { + let Some(meta) = self.queue.incoming.complete(&response.id) else { + return Ok(false); + }; + debug_assert!(!meta.method.is_empty()); + + self.sender.send(Message::Response(response))?; + Ok(true) + } + + pub(crate) fn cancel_request(&mut self, id: RequestId) -> Result { + let Some(meta) = self.queue.incoming.complete(&id) else { + return Ok(false); + }; + let response_error = lsp_server::ResponseError::try_from(RequestError::RequestCanceled { + method: meta.method, + })?; + self.sender.send(Message::Response(Response { + id, + result: None, + error: Some(response_error), + }))?; + Ok(true) + } + + fn send_ok_by_id(&mut self, id: RequestId, result: T) -> Result + where + T: Serialize, + { + let response = Response::new_ok(id, serde_json::to_value(result)?); + self.send_inflight_response(response) + } + + fn send_error_by_id(&mut self, id: RequestId, error: RequestError) -> Result { + let response_error = lsp_server::ResponseError::try_from(error)?; + let response = Response { + id, + result: None, + error: Some(response_error), + }; + self.send_inflight_response(response) + } + + pub(crate) fn send_ok( + &mut self, + request: IncomingRequest, + result: R::Result, + ) -> Result + where + R: lsp_types::request::Request, + R::Result: Serialize, + { + self.send_ok_by_id(request.id, result) + } + + pub(crate) fn send_error( + &mut self, 
+ request: IncomingRequest, + error: RequestError, + ) -> Result + where + R: lsp_types::request::Request, + { + self.send_error_by_id(request.id, error) + } + + pub(crate) fn send_unknown_error( + &mut self, + request: UnknownIncomingRequest, + error: RequestError, + ) -> Result { + self.send_error_by_id(request.id, error) + } + + pub(crate) fn complete_outgoing(&mut self, id: RequestId) -> Option { + self.queue.outgoing.complete(id) + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use crossbeam_channel::unbounded; + use lsp_server::{ErrorCode, Message, RequestId, Response}; + use lsp_types::request::{CodeLensResolve, RegisterCapability, Request as _}; + + use super::InflightRequests; + use crate::protocol::request_error::{RequestError, RequestErrorData}; + + #[test] + fn send_inflight_response_requires_registered_request_id() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + let id = RequestId::from(7); + + assert!(!inflight + .send_inflight_response(Response::new_ok( + id.clone(), + serde_json::json!({"ok": true}), + )) + .expect("expected success")); + assert!(receiver.try_recv().is_err()); + + inflight.begin_unknown(id.clone(), "example/method"); + assert!(inflight + .send_inflight_response(Response::new_ok(id, serde_json::json!({"ok": true}))) + .expect("expected success")); + + let message = receiver.recv().expect("expected success"); + assert_matches!( + message, + Message::Response(response) if { + assert_matches!( + response, + Response { + ref id, + result: Some(ref result), + error: None, + } + if id == &RequestId::from(7) + && result == &serde_json::json!({"ok": true}) + ); + true + } + ); + } + + #[test] + fn complete_outgoing_returns_none_for_untracked_response() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + assert_eq!(inflight.complete_outgoing(RequestId::from(11)), None); + assert!(receiver.try_recv().is_err()); + } + + #[test] 
+ fn send_ok_uses_typed_handle() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + let id = RequestId::from(15); + let request = inflight.begin::(id); + let expected = lsp_types::CodeLens { + range: lsp_types::Range { + start: lsp_types::Position::new(0, 0), + end: lsp_types::Position::new(0, 1), + }, + command: None, + data: None, + }; + assert!(inflight + .send_ok(request, expected.clone()) + .expect("expected success")); + + let message = receiver.recv().expect("expected success"); + assert_matches!( + message, + Message::Response(response) if { + assert_matches!( + response, + Response { + ref id, + result: Some(ref result), + error: None, + } + if id == &RequestId::from(15) + && result + == &serde_json::to_value(expected).expect("expected success") + ); + true + } + ); + } + + #[test] + fn send_unknown_err_uses_unknown_handle() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + + let request = inflight.begin_unknown(RequestId::from(23), "custom/method"); + assert_eq!(request.method(), "custom/method"); + assert!(inflight + .send_unknown_error( + request, + RequestError::MethodNotFound { + method: "custom/method".to_string(), + }, + ) + .expect("expected success")); + + let message = receiver.recv().expect("expected success"); + let response = assert_matches!(message, Message::Response(response) => response); + assert_matches!( + response, + Response { + ref id, + result: None, + error: Some(ref error), + } + if id == &RequestId::from(23) && error.code == ErrorCode::MethodNotFound as i32 + ); + let data = response + .error + .and_then(|error| error.data) + .expect("method-not-found should include structured error data"); + let data: RequestErrorData = + serde_json::from_value(data).expect("error data should deserialize"); + assert_matches!( + data, + RequestErrorData::MethodNotFound { method } if method == "custom/method" + ); + } + + #[test] + fn 
cancel_request_sends_request_canceled_error_for_pending_request() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + let id = RequestId::from(31); + inflight.begin_unknown(id.clone(), "textDocument/codeLens"); + + assert!(inflight.cancel_request(id).expect("expected success")); + + let message = receiver.recv().expect("expected success"); + let response = assert_matches!(message, Message::Response(response) => response); + assert_matches!( + response, + Response { + ref id, + result: None, + error: Some(ref error), + } + if id == &RequestId::from(31) && error.code == ErrorCode::RequestCanceled as i32 + ); + let data = response + .error + .and_then(|error| error.data) + .expect("request-canceled should include structured error data"); + let data: RequestErrorData = + serde_json::from_value(data).expect("error data should deserialize"); + assert_matches!( + data, + RequestErrorData::RequestCanceled { method } if method == "textDocument/codeLens" + ); + } + + #[test] + fn cancel_request_ignores_untracked_request() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + + assert!(!inflight + .cancel_request(RequestId::from(41)) + .expect("expected success")); + assert!(receiver.try_recv().is_err()); + } + + #[test] + fn send_outgoing_request_registers_and_tracks_response() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + let params = lsp_types::RegistrationParams { + registrations: vec![], + }; + + inflight + .send_outgoing_request::(params.clone()) + .expect("expected success"); + + let message = receiver.recv().expect("expected success"); + assert_matches!( + message, + Message::Request(request) if { + assert_eq!(request.method, RegisterCapability::METHOD); + let parsed_params: lsp_types::RegistrationParams = + serde_json::from_value(request.params.clone()).expect("expected success"); + assert_eq!(parsed_params, params); + let meta = 
inflight + .complete_outgoing(request.id.clone()) + .expect("outgoing request should be tracked"); + assert_eq!(meta.method, RegisterCapability::METHOD); + true + } + ); + } +} diff --git a/crates/jrsonnet-lsp/src/protocol/mod.rs b/crates/jrsonnet-lsp/src/protocol/mod.rs new file mode 100644 index 00000000..434acb66 --- /dev/null +++ b/crates/jrsonnet-lsp/src/protocol/mod.rs @@ -0,0 +1,2 @@ +pub(crate) mod inflight_requests; +pub(crate) mod request_error; diff --git a/crates/jrsonnet-lsp/src/protocol/request_error.rs b/crates/jrsonnet-lsp/src/protocol/request_error.rs new file mode 100644 index 00000000..fafba371 --- /dev/null +++ b/crates/jrsonnet-lsp/src/protocol/request_error.rs @@ -0,0 +1,103 @@ +use lsp_server::{ErrorCode, ResponseError}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub(crate) enum RequestErrorData { + InvalidParams { method: String }, + MethodNotFound { method: String }, + UnknownExecuteCommand { command: String }, + MissingExecuteHandler { command: String }, + RequestCanceled { method: String }, + ServerShuttingDown, + AsyncHandlerFailed { method: String }, + AsyncHandlerPanicked { method: String }, +} + +#[derive(Debug, Error, Clone, PartialEq, Eq)] +pub(crate) enum RequestError { + #[error("Invalid params for {method}: {reason}")] + InvalidParams { method: String, reason: String }, + #[error("Method not found: {method}")] + MethodNotFound { method: String }, + #[error("Unknown execute command: {command}")] + UnknownExecuteCommand { command: String }, + #[error("Missing execute handler for custom operation: {command}")] + MissingExecuteHandler { command: String }, + #[error("Request canceled: {method}")] + RequestCanceled { method: String }, + #[error("Server is shutting down")] + ServerShuttingDown, + #[error("{method} failed: {details}")] + AsyncHandlerFailed { method: String, details: String }, + 
#[error("{method} panicked")] + AsyncHandlerPanicked { method: String }, +} + +impl RequestError { + #[must_use] + pub(crate) fn code(&self) -> ErrorCode { + match self { + Self::InvalidParams { .. } + | Self::UnknownExecuteCommand { .. } + | Self::MissingExecuteHandler { .. } => ErrorCode::InvalidParams, + Self::MethodNotFound { .. } => ErrorCode::MethodNotFound, + Self::RequestCanceled { .. } => ErrorCode::RequestCanceled, + Self::ServerShuttingDown => ErrorCode::InvalidRequest, + Self::AsyncHandlerFailed { .. } | Self::AsyncHandlerPanicked { .. } => { + ErrorCode::InternalError + } + } + } + + #[must_use] + pub(crate) fn invalid_params(method: &str, reason: impl Into) -> Self { + Self::InvalidParams { + method: method.to_string(), + reason: reason.into(), + } + } +} + +impl From<&RequestError> for RequestErrorData { + fn from(error: &RequestError) -> Self { + match error { + RequestError::InvalidParams { method, .. } => Self::InvalidParams { + method: method.clone(), + }, + RequestError::MethodNotFound { method } => Self::MethodNotFound { + method: method.clone(), + }, + RequestError::UnknownExecuteCommand { command } => Self::UnknownExecuteCommand { + command: command.clone(), + }, + RequestError::MissingExecuteHandler { command } => Self::MissingExecuteHandler { + command: command.clone(), + }, + RequestError::RequestCanceled { method } => Self::RequestCanceled { + method: method.clone(), + }, + RequestError::ServerShuttingDown => Self::ServerShuttingDown, + RequestError::AsyncHandlerFailed { method, .. 
} => Self::AsyncHandlerFailed { + method: method.clone(), + }, + RequestError::AsyncHandlerPanicked { method } => Self::AsyncHandlerPanicked { + method: method.clone(), + }, + } + } +} + +impl TryFrom for ResponseError { + type Error = serde_json::Error; + + fn try_from(error: RequestError) -> Result { + let data = serde_json::to_value(RequestErrorData::from(&error))?; + Ok(ResponseError { + code: error.code() as i32, + message: error.to_string(), + data: Some(data), + }) + } +} diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs new file mode 100644 index 00000000..a79ddb0b --- /dev/null +++ b/crates/jrsonnet-lsp/src/server.rs @@ -0,0 +1,357 @@ +//! LSP server main loop. +//! +//! Uses the lsp-server crate for the transport layer, following the rust-analyzer pattern. +//! Diagnostics are computed asynchronously with debouncing to avoid blocking the event loop. + +mod async_requests; +mod custom_operations; +mod event_loop; +mod import_graph; +mod initialization; +mod notifications; +mod request_dispatch; +mod requests; +mod watched_files; +mod workspace_index; + +use std::{ + collections::BTreeSet, + panic::{catch_unwind, AssertUnwindSafe}, + path::PathBuf, + sync::{atomic::AtomicBool, Arc}, +}; + +use anyhow::Result; +use crossbeam_channel::{Receiver, Sender}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, FileId, PathStore}; +use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; +use jrsonnet_lsp_inference::{ + new_shared_cache, DocumentManager, SharedDocumentManager, SharedTypeCache, +}; +use jrsonnet_lsp_types::GlobalTyStore; +use lsp_server::{Connection, Message, Notification, RequestId, Response}; +use lsp_types::{notification::PublishDiagnostics, InitializeParams, OneOf}; +use parking_lot::{Mutex, RwLock}; +use rustc_hash::FxHashSet; +use tracing::{debug, error, info, warn}; + +use self::async_requests::{AsyncRequestContext, WorkspaceRequestState}; +use crate::{ + analysis::{tanka::effective_import_roots, 
EvalConfig, Evaluator},
	async_diagnostics::{AsyncDiagnostics, DiagnosticsConfig},
	config::ServerConfig,
	protocol::{inflight_requests::InflightRequests, request_error::RequestError},
};

/// Shared server configuration.
pub type SharedConfig = Arc<RwLock<ServerConfig>>;

/// LSP server state.
pub struct Server {
	/// The LSP connection.
	connection: Connection,
	/// Document manager.
	documents: SharedDocumentManager,
	/// Import graph for cross-file references.
	import_graph: Arc<RwLock<ImportGraph>>,
	/// Global type store shared across all analyses.
	global_types: Arc<GlobalTyStore>,
	/// Cross-file type cache for import resolution.
	type_cache: SharedTypeCache,
	/// Server configuration.
	config: SharedConfig,
	/// Workspace roots derived from initialize params.
	workspace_roots: Vec<PathBuf>,
	/// Files discovered under workspace roots.
	workspace_known_files: Arc<RwLock<FxHashSet<FileId>>>,
	/// Files whose import entries should be (re)materialized on-demand.
	workspace_dirty_files: Arc<RwLock<FxHashSet<FileId>>>,
	/// Whether workspace discovery has been performed at least once.
	workspace_discovery_done: Arc<AtomicBool>,
	/// Serializes workspace discovery/materialization for consistent graph reads.
	workspace_graph_materialization_lock: Arc<Mutex<()>>,
	/// Evaluator for runtime diagnostics (wrapped in Arc for sharing with async diagnostics).
	evaluator: Option<Arc<Evaluator>>,
	/// Async diagnostics runner.
	diagnostics: AsyncDiagnostics,
	/// In-flight request tracker and response boundary.
	inflight_requests: InflightRequests,
	/// Sender half of the channel for async request responses.
	request_response_sender: Sender<Response>,
	/// Receiver half of the channel for async request responses.
	request_response_receiver: Receiver<Response>,
	/// Shutdown requested flag.
	shutdown_requested: bool,
	/// Whether the client supports `workspace/inlayHint/refresh`.
	client_supports_inlay_hint_refresh: bool,
}

/// Workspace-root fields extracted from raw `initialize` params.
#[derive(Debug, Clone, Default, serde::Deserialize)]
#[serde(default)]
struct InitializeRoots {
	#[serde(rename = "workspaceFolders")]
	workspace_folders: Option<Vec<lsp_types::WorkspaceFolder>>,
	// NOTE(review): inner types below were lost in extraction; `Uri`/`String` match
	// LSP's deprecated `rootUri`/`rootPath` fields — confirm against the real source.
	#[serde(rename = "rootUri")]
	root_uri: Option<lsp_types::Uri>,
	#[serde(rename = "rootPath")]
	root_path: Option<String>,
}

const WATCHED_FILE_GLOB_PATTERNS: [&str; 3] = ["**/*.jsonnet", "**/*.libsonnet", "**/*.json"];

/// Deduplicate and sort a collection of paths (BTreeSet gives sorted order).
pub(super) fn unique_files(files: impl IntoIterator<Item = PathBuf>) -> Vec<PathBuf> {
	let mut set = BTreeSet::new();
	set.extend(files);
	set.into_iter().collect()
}

impl Server {
	/// Create a new server with the given connection.
	#[must_use]
	pub fn new(connection: Connection) -> Self {
		let global_types = Arc::new(GlobalTyStore::new());
		let path_store = PathStore::new();
		let documents = Arc::new(DocumentManager::new(
			Arc::clone(&global_types),
			path_store.clone(),
		));
		let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone())));
		let type_cache = new_shared_cache(Arc::clone(&global_types), path_store);
		let (request_response_sender, request_response_receiver) = crossbeam_channel::unbounded();
		let inflight_requests = InflightRequests::new(connection.sender.clone());
		let workspace_known_files = Arc::new(RwLock::new(FxHashSet::default()));
		let workspace_dirty_files = Arc::new(RwLock::new(FxHashSet::default()));
		let workspace_discovery_done = Arc::new(AtomicBool::new(false));
		let workspace_graph_materialization_lock = Arc::new(Mutex::new(()));
		let diagnostics = AsyncDiagnostics::new(DiagnosticsConfig {
			evaluator: None,
			documents: Arc::clone(&documents),
			import_graph: Arc::clone(&import_graph),
			type_cache: Arc::clone(&type_cache),
			global_types: Arc::clone(&global_types),
		});

		Self {
			connection,
			documents,
			import_graph,
			type_cache,
			global_types,
			config: Arc::new(RwLock::new(ServerConfig::default())),
			workspace_roots: Vec::new(),
			workspace_known_files,
			workspace_dirty_files,
			workspace_discovery_done,
			workspace_graph_materialization_lock,
			evaluator: None,
			diagnostics,
			inflight_requests,
			request_response_sender,
			request_response_receiver,
			shutdown_requested: false,
			client_supports_inlay_hint_refresh: false,
		}
	}

	/// Get a reference to the global type store.
	pub fn global_types(&self) -> &Arc<GlobalTyStore> {
		&self.global_types
	}

	/// Update the configuration.
	pub fn update_config(&self, new_config: ServerConfig) {
		let mut config = self.config.write();
		config.merge(new_config);
	}

	/// Invalidate the type cache for a file and all files that transitively import it.
	///
	/// When a file changes, any cached types for files that depend on it may be stale,
	/// so we invalidate the entire dependency chain.
	fn invalidate_type_cache_with_dependents(&self, file: FileId) {
		let dependents = self.import_graph.read().transitive_importers(file);
		let mut cache = self.type_cache.write();
		cache.invalidate(file);
		cache.invalidate_many(dependents.iter().copied());
		drop(cache);

		// Keep analysis cache consistent with type cache invalidation.
		self.documents.invalidate_analysis_file(file);
		for dependent in dependents {
			self.documents.invalidate_analysis_file(dependent);
		}
	}

	/// Snapshot the shared state needed by async request handlers.
	fn async_request_context(&self) -> AsyncRequestContext {
		AsyncRequestContext::new(
			Arc::clone(&self.documents),
			Arc::clone(&self.import_graph),
			Arc::clone(&self.global_types),
			Arc::clone(&self.type_cache),
			Arc::clone(&self.config),
			WorkspaceRequestState {
				roots: self.workspace_roots.clone(),
				known_files: Arc::clone(&self.workspace_known_files),
				dirty_files: Arc::clone(&self.workspace_dirty_files),
				discovery_done: Arc::clone(&self.workspace_discovery_done),
				graph_materialization_lock: Arc::clone(&self.workspace_graph_materialization_lock),
			},
		)
	}

	/// Rebuild the evaluator and diagnostics runner from the current configuration.
	fn reconfigure_runtime_components(&mut self, config: &ServerConfig) {
		self.evaluator = if config.enable_eval_diagnostics {
			let eval_config = EvalConfig {
				jpath: config.jpath.clone(),
				resolve_paths_with_tanka: config.resolve_paths_with_tanka,
			};
			Some(Arc::new(Evaluator::new(&eval_config)))
		} else {
			None
		};

		self.diagnostics = AsyncDiagnostics::new(DiagnosticsConfig {
			evaluator: self.evaluator.clone(),
			documents: Arc::clone(&self.documents),
			import_graph: Arc::clone(&self.import_graph),
			type_cache: Arc::clone(&self.type_cache),
			global_types: Arc::clone(&self.global_types),
		});
	}

	/// Run `compute` on the rayon pool and forward its result (or a structured
	/// error/panic response) through the async-response channel.
	/// (`Result<serde_json::Value>` restored: the value feeds `Response::new_ok`.)
	fn spawn_async_response<F>(&self, id: RequestId, method: &'static str, compute: F)
	where
		F: FnOnce() -> Result<serde_json::Value> + Send + 'static,
	{
		let sender = self.request_response_sender.clone();
		rayon::spawn(move || {
			let response = match catch_unwind(AssertUnwindSafe(compute)) {
				Ok(Ok(value)) => Response::new_ok(id, value),
				Ok(Err(err)) => {
					error!("Async handler failed for {}: {err:#}", method);
					let response_error =
						lsp_server::ResponseError::try_from(RequestError::AsyncHandlerFailed {
							method: method.to_string(),
							details: format!("{err:#}"),
						})
						.unwrap_or_else(|serialize_error| {
							error!(
								"Failed to serialize async handler error for {}: {}",
								method, serialize_error
							);
							lsp_server::ResponseError {
								code: lsp_server::ErrorCode::InternalError as i32,
								message: format!("{method} failed"),
								data: None,
							}
						});
					Response {
						id,
						result: None,
						error: Some(response_error),
					}
				}
				Err(_) => {
					error!("Async handler panicked for {}", method);
					let response_error =
						lsp_server::ResponseError::try_from(RequestError::AsyncHandlerPanicked {
							method: method.to_string(),
						})
						.unwrap_or_else(|serialize_error| {
							error!(
								"Failed to serialize async panic error for {}: {}",
								method, serialize_error
							);
							lsp_server::ResponseError {
								code: lsp_server::ErrorCode::InternalError as i32,
								message: format!("{method} panicked"),
								data: None,
							}
						});
					Response {
						id,
						result: None,
						error: Some(response_error),
					}
				}
			};
			if sender.send(response).is_err() {
				debug!("Dropping async response for {}: channel closed", method);
			}
		});
	}

	/// Run the server, handling the initialize handshake first.
	///
	/// # Errors
	/// Returns an error if initialization fails, request/notification processing fails,
	/// or message I/O over the LSP connection fails.
	pub fn run(mut self) -> Result<()> {
		info!("Starting jrsonnet language server");

		// Handle initialize request
		let (id, params, init_roots) = self.initialize()?;
		self.client_supports_inlay_hint_refresh = Self::supports_inlay_hint_refresh(&params);

		// Parse initialization options into configuration
		let init_config =
			ServerConfig::from_initialization_options(params.initialization_options.clone());
		self.update_config(init_config.clone());
		info!(
			"Configuration: jpath={:?}, eval_diagnostics={}, tanka_mode={}",
			init_config.jpath,
			init_config.enable_eval_diagnostics,
			init_config.resolve_paths_with_tanka
		);

		self.reconfigure_runtime_components(&init_config);
		debug!("Runtime components initialized");

		// Send initialize result
		let result = Self::initialize_result();

		let result = serde_json::to_value(result)?;
		self.connection
			.sender
			.send(Message::Response(Response::new_ok(id, result)))?;

		info!("Server initialized");

		// Wait for initialized notification
		match self.connection.receiver.recv() {
			Ok(Message::Notification(n)) if n.method == "initialized" => {
				info!("Received initialized notification");
			}
			Ok(msg) => {
				warn!("Expected initialized notification, got: {:?}", msg);
			}
			Err(e) => {
				error!("Error receiving initialized notification: {}", e);
				return Err(e.into());
			}
		}

		self.register_did_change_watched_files(&params, &init_roots)?;
		let workspace_roots = Self::workspace_root_paths(&init_roots);
		self.workspace_roots.clone_from(&workspace_roots);

		// Main loop
		self.main_loop()?;

		info!("Server shutting down");
		Ok(())
	}
}

/// Run the LSP server over stdio.
///
/// # Errors
/// Returns an error if server startup fails, the server loop returns an error,
/// or stdio worker threads fail to join.
pub fn run_stdio() -> Result<()> {
	let (connection, io_threads) = Connection::stdio();
	let server = Server::new(connection);
	server.run()?;
	io_threads.join()?;
	Ok(())
}
diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs
new file mode 100644
index 00000000..53bfc4a0
--- /dev/null
+++ b/crates/jrsonnet-lsp/src/server/async_requests.rs
@@ -0,0 +1,89 @@
mod code_action;
mod code_lens;
mod commands;
mod completion;
mod document_highlight;
mod document_symbol;
mod formatting;
mod goto_declaration;
mod goto_definition;
mod goto_implementation;
mod goto_shared;
mod goto_type_definition;
mod hover;
mod import_graph_precision;
mod import_lookup;
mod inlay_hints;
mod prepare_rename;
mod references;
mod rename;
mod semantic_tokens_full;
mod semantic_tokens_range;
mod signature_help;
mod workspace_symbol;

use std::{
	path::PathBuf,
	sync::{atomic::AtomicBool, Arc},
};

use jrsonnet_lsp_document::{CanonicalPath, Document, FileId};
use jrsonnet_lsp_import::ImportGraph;
use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider};
use jrsonnet_lsp_types::GlobalTyStore;
use parking_lot::{Mutex, RwLock};
use rustc_hash::FxHashSet;

use super::SharedConfig;

/// Workspace discovery state shared with async handlers; mirrors the
/// corresponding `Server` fields (type arguments restored to match them).
#[derive(Clone)]
pub(super) struct WorkspaceRequestState {
	pub(super) roots: Vec<PathBuf>,
	pub(super) known_files: Arc<RwLock<FxHashSet<FileId>>>,
	pub(super) dirty_files: Arc<RwLock<FxHashSet<FileId>>>,
	pub(super) discovery_done: Arc<AtomicBool>,
	pub(super) graph_materialization_lock: Arc<Mutex<()>>,
}

/// Everything an async request handler needs, cloned out of `Server` so the
/// handler can run on the thread pool without borrowing the server.
#[derive(Clone)]
pub(super) struct AsyncRequestContext {
	documents: SharedDocumentManager,
	import_graph: Arc<RwLock<ImportGraph>>,
	global_types: Arc<GlobalTyStore>,
	type_cache: SharedTypeCache,
	config: SharedConfig,
	workspace: WorkspaceRequestState,
}

impl AsyncRequestContext {
	pub(super) fn new(
		documents: SharedDocumentManager,
		import_graph: Arc<RwLock<ImportGraph>>,
		global_types: Arc<GlobalTyStore>,
		type_cache: SharedTypeCache,
		config: SharedConfig,
		workspace: WorkspaceRequestState,
	) -> Self {
		Self {
			documents,
			import_graph,
			global_types,
			type_cache,
			config,
			workspace,
		}
	}

	/// Analyze a document with dependency-aware import resolution.
	fn analyze_document(&self, path: &CanonicalPath, doc: &Document) -> Arc<TypeAnalysis> {
		let version = doc.version();
		self.documents.get_or_compute_analysis(path, version, || {
			let provider = TypeProvider::new(
				Arc::clone(&self.type_cache),
				Arc::clone(&self.import_graph),
				Arc::clone(&self.global_types),
			);
			provider.analyze(path, doc, self.documents.as_ref())
		})
	}
}
diff --git a/crates/jrsonnet-lsp/src/server/async_requests/code_action.rs b/crates/jrsonnet-lsp/src/server/async_requests/code_action.rs
new file mode 100644
index 00000000..a715b676
--- /dev/null
+++ b/crates/jrsonnet-lsp/src/server/async_requests/code_action.rs
@@ -0,0 +1,28 @@
use jrsonnet_lsp_document::CanonicalPath;
use jrsonnet_lsp_handlers as handlers;
use lsp_types::{CodeActionParams, CodeActionResponse};

use super::AsyncRequestContext;

impl AsyncRequestContext {
	/// `textDocument/codeAction`; returns `None` when there is nothing to offer.
	pub(crate) fn code_action(&self, params: &CodeActionParams) -> Option<CodeActionResponse> {
		let uri = &params.text_document.uri;
		let path = CanonicalPath::from_uri(uri).ok()?;
		let actions = {
			let doc = self.documents.get(&path)?;
			let code_action_config = self.config.read().code_actions;
			handlers::code_actions(
				&doc,
				uri,
				params.range,
				&params.context,
				&code_action_config,
			)
		};
		if actions.is_empty() {
			return None;
		}

		Some(actions)
	}
}
diff --git a/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs b/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs
new file mode 100644
index 00000000..c2eab16f
--- /dev/null
+++ b/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs
@@ -0,0 +1,20 @@
use jrsonnet_lsp_document::CanonicalPath;
use jrsonnet_lsp_handlers as handlers;
use lsp_types::{CodeLens, CodeLensParams};

use super::AsyncRequestContext;
use crate::server::custom_operations;

impl AsyncRequestContext {
pub(crate) fn code_lens(&self, params: &CodeLensParams) -> Option> { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let config = handlers::CodeLensConfig::all(); + let analysis = self.analyze_document(&path, &doc); + let mut lenses = handlers::code_lens(&doc, uri, &config, Some(&analysis)); + custom_operations::extend_code_lenses(self, &path, &doc, uri, &mut lenses); + Some(lenses) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs new file mode 100644 index 00000000..99d1ff08 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs @@ -0,0 +1,107 @@ +use jrsonnet_lsp_document::CanonicalPath; +use tracing::warn; + +use super::super::AsyncRequestContext; +use crate::analysis::eval::eval_import_roots_for_file; + +impl AsyncRequestContext { + pub(in crate::server) fn execute_eval_file(&self, uri: &str) -> Option { + use jrsonnet_evaluator::manifest::JsonFormat; + use jrsonnet_parser::{SourceFile, SourcePath}; + + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; + let text = self.documents.get_text(&path)?; + + let jpath = self.eval_command_jpath(Some(&path)); + let state = crate::analysis::eval::create_state_with_jpath(&jpath); + + let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf())); + + match state.evaluate_snippet(source_path.to_string(), &text) { + Ok(val) => { + let json_format = JsonFormat::default(); + match val.manifest(json_format) { + Ok(json_str) => match serde_json::from_str::(&json_str) { + Ok(json) => Some(json), + Err(e) => { + warn!("Failed to parse manifest result as JSON: {}", e); + Some(serde_json::Value::String(json_str)) + } + }, + Err(e) => { + warn!("Failed to manifest: {}", e); + Some(serde_json::json!({ + "error": format!("Manifest error: {}", e.error()) + 
})) + } + } + } + Err(e) => { + warn!("Evaluation failed: {}", e); + Some(serde_json::json!({ + "error": format!("Evaluation error: {}", e.error()) + })) + } + } + } + + pub(in crate::server) fn execute_eval_expression( + &self, + expr: &str, + base_uri: Option<&str>, + ) -> serde_json::Value { + use jrsonnet_evaluator::manifest::JsonFormat; + use jrsonnet_parser::{SourceFile, SourcePath}; + + let base_path = base_uri + .and_then(|uri| uri.parse::().ok()) + .and_then(|uri| CanonicalPath::from_uri(&uri).ok()); + let jpath = self.eval_command_jpath(base_path.as_ref()); + let state = crate::analysis::eval::create_state_with_jpath(&jpath); + let source_name = base_path.map_or_else( + || "".to_string(), + |path| SourcePath::new(SourceFile::new(path.as_path().to_path_buf())).to_string(), + ); + + match state.evaluate_snippet(source_name, expr) { + Ok(val) => { + let json_format = JsonFormat::default(); + match val.manifest(json_format) { + Ok(json_str) => match serde_json::from_str::(&json_str) { + Ok(json) => json, + Err(e) => { + warn!("Failed to parse manifest result as JSON: {}", e); + serde_json::Value::String(json_str) + } + }, + Err(e) => serde_json::json!({ + "error": format!("Manifest error: {}", e.error()) + }), + } + } + Err(e) => serde_json::json!({ + "error": format!("Evaluation error: {}", e.error()) + }), + } + } + + pub(super) fn eval_command_jpath( + &self, + base_path: Option<&CanonicalPath>, + ) -> Vec { + let config = self.config.read(); + let jpath = base_path.map_or_else( + || config.jpath.clone(), + |base_path| { + eval_import_roots_for_file( + base_path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ) + }, + ); + drop(config); + jpath + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs new file mode 100644 index 00000000..30503843 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs @@ -0,0 +1,30 @@ 
+use jrsonnet_lsp_document::CanonicalPath; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(in crate::server) fn execute_find_transitive_importers( + &self, + uri: &str, + ) -> Option { + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; + let importers = self.ensure_precise_transitive_importers(&path); + let import_graph = self.import_graph.read(); + let mut importer_uris: Vec = importers + .iter() + .filter_map(|file| { + import_graph + .path(*file) + .and_then(|path| path.to_uri().ok().map(|uri| uri.to_string())) + }) + .collect(); + drop(import_graph); + importer_uris.sort(); + + Some(serde_json::json!({ + "file": uri, + "transitiveImporters": importer_uris + })) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs new file mode 100644 index 00000000..29a5e8ca --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs @@ -0,0 +1,2 @@ +mod eval; +mod graph; diff --git a/crates/jrsonnet-lsp/src/server/async_requests/completion.rs b/crates/jrsonnet-lsp/src/server/async_requests/completion.rs new file mode 100644 index 00000000..b8b54963 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/completion.rs @@ -0,0 +1,36 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{CompletionParams, CompletionResponse}; + +use super::AsyncRequestContext; +use crate::analysis::tanka::effective_import_roots; + +impl AsyncRequestContext { + pub(crate) fn completion(&self, params: &CompletionParams) -> Option { + let uri = ¶ms.text_document_position.text_document.uri; + let position = params.text_document_position.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let semantic = self.documents.get_semantic_artifacts(&path); + + let lsp_pos = position.into(); + let 
analysis = self.analyze_document(&path, &doc); + let config = self.config.read(); + let import_roots = effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + drop(config); + + let list = handlers::completion_with_import_roots_and_semantic( + &doc, + lsp_pos, + Some(path.as_path()), + &import_roots, + &analysis, + semantic.as_deref(), + )?; + Some(CompletionResponse::List(list)) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/document_highlight.rs b/crates/jrsonnet-lsp/src/server/async_requests/document_highlight.rs new file mode 100644 index 00000000..324850ed --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/document_highlight.rs @@ -0,0 +1,25 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{DocumentHighlight, DocumentHighlightParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn document_highlight( + &self, + params: &DocumentHighlightParams, + ) -> Option> { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let lsp_pos = position.into(); + + let highlights = handlers::document_highlights(&doc, lsp_pos); + if highlights.is_empty() { + return None; + } + + Some(highlights) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/document_symbol.rs b/crates/jrsonnet-lsp/src/server/async_requests/document_symbol.rs new file mode 100644 index 00000000..9dae8aba --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/document_symbol.rs @@ -0,0 +1,19 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{DocumentSymbolParams, DocumentSymbolResponse}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn document_symbol( + &self, + params: 
&DocumentSymbolParams, + ) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let symbols = handlers::document_symbols(&doc); + Some(DocumentSymbolResponse::Nested(symbols)) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs new file mode 100644 index 00000000..a32baa68 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs @@ -0,0 +1,200 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{ + DocumentFormattingParams, DocumentRangeFormattingParams, FormattingOptions, TextEdit, +}; + +use super::AsyncRequestContext; + +fn formatting_config_for_request( + base: &handlers::FormattingConfig, + options: &FormattingOptions, +) -> handlers::FormattingConfig { + let mut config = base.clone(); + config.indent = if options.insert_spaces { + u8::try_from(options.tab_size).unwrap_or(u8::MAX) + } else { + 0 + }; + if let Some(trim_trailing_whitespace) = options.trim_trailing_whitespace { + config.trim_trailing_whitespace = trim_trailing_whitespace; + } + if let Some(trim_final_newlines) = options.trim_final_newlines { + config.trim_final_newlines = trim_final_newlines; + } + config +} + +fn apply_text_options(mut formatted: String, options: &FormattingOptions) -> String { + match options.insert_final_newline { + Some(true) => { + if !formatted.ends_with('\n') { + formatted.push('\n'); + } + } + Some(false) => { + formatted.truncate(formatted.trim_end_matches('\n').len()); + } + None => {} + } + + formatted +} + +fn apply_text_options_to_edits(edits: &mut [TextEdit], options: &FormattingOptions) { + for edit in edits { + edit.new_text = apply_text_options(std::mem::take(&mut edit.new_text), options); + } +} + +impl AsyncRequestContext { + pub(crate) fn formatting(&self, params: &DocumentFormattingParams) -> Option> { + let uri 
= ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let config = formatting_config_for_request(&self.config.read().formatting, ¶ms.options); + let mut edits = handlers::format_document_with_config(doc.text(), &config)?; + apply_text_options_to_edits(&mut edits, ¶ms.options); + + Some(edits) + } + + pub(crate) fn formatting_range( + &self, + params: &DocumentRangeFormattingParams, + ) -> Option> { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let config = formatting_config_for_request(&self.config.read().formatting, ¶ms.options); + handlers::format_document_range_with_config(doc.text(), params.range, &config) + } +} + +#[cfg(test)] +mod tests { + use lsp_types::FormattingOptions; + use rstest::rstest; + + use super::{apply_text_options, formatting_config_for_request}; + + #[rstest] + #[case(true, 2, 2)] + #[case(true, 300, u8::MAX)] + #[case(false, 8, 0)] + fn test_formatting_options_control_indent( + #[case] insert_spaces: bool, + #[case] tab_size: u32, + #[case] expected_indent: u8, + ) { + let base = jrsonnet_lsp_handlers::FormattingConfig { + indent: 7, + ..jrsonnet_lsp_handlers::FormattingConfig::default() + }; + let options = FormattingOptions { + tab_size, + insert_spaces, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: None, + trim_final_newlines: None, + }; + + let merged = formatting_config_for_request(&base, &options); + assert_eq!(merged.indent, expected_indent); + assert_eq!(merged.max_blank_lines, base.max_blank_lines); + } + + #[rstest] + #[case(None, false, false)] + #[case(Some(true), false, true)] + #[case(Some(false), true, false)] + fn test_formatting_options_control_trim_trailing_whitespace( + #[case] request_trim: Option, + #[case] base_trim: bool, + #[case] expected_trim: bool, + ) { + let base = jrsonnet_lsp_handlers::FormattingConfig { + 
trim_trailing_whitespace: base_trim, + ..jrsonnet_lsp_handlers::FormattingConfig::default() + }; + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: request_trim, + insert_final_newline: None, + trim_final_newlines: None, + }; + + let merged = formatting_config_for_request(&base, &options); + assert_eq!(merged.trim_trailing_whitespace, expected_trim); + } + + #[rstest] + #[case(None, false, false)] + #[case(Some(true), false, true)] + #[case(Some(false), true, false)] + fn test_formatting_options_control_trim_final_newlines( + #[case] request_trim: Option, + #[case] base_trim: bool, + #[case] expected_trim: bool, + ) { + let base = jrsonnet_lsp_handlers::FormattingConfig { + trim_final_newlines: base_trim, + ..jrsonnet_lsp_handlers::FormattingConfig::default() + }; + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: None, + trim_final_newlines: request_trim, + }; + + let merged = formatting_config_for_request(&base, &options); + assert_eq!(merged.trim_final_newlines, expected_trim); + } + + #[test] + fn test_insert_final_newline_true_appends_newline() { + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: Some(true), + trim_final_newlines: None, + }; + assert_eq!(apply_text_options("{}".to_string(), &options), "{}\n"); + } + + #[test] + fn test_insert_final_newline_false_removes_trailing_newlines() { + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: Some(false), + trim_final_newlines: None, + }; + assert_eq!(apply_text_options("{}\n\n".to_string(), &options), "{}"); + } + + #[test] + fn 
test_trim_final_newlines_with_insert_final_newline_true_keeps_one() { + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: Some(true), + trim_final_newlines: Some(true), + }; + assert_eq!(apply_text_options("{}".to_string(), &options), "{}\n"); + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_declaration.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_declaration.rs new file mode 100644 index 00000000..02603d02 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_declaration.rs @@ -0,0 +1,12 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use super::{goto_shared::GotoTarget, AsyncRequestContext}; + +impl AsyncRequestContext { + pub(crate) fn goto_declaration( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::Declaration) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_definition.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_definition.rs new file mode 100644 index 00000000..78e388bf --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_definition.rs @@ -0,0 +1,12 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use super::{goto_shared::GotoTarget, AsyncRequestContext}; + +impl AsyncRequestContext { + pub(crate) fn goto_definition( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::Definition) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_implementation.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_implementation.rs new file mode 100644 index 00000000..16dd2591 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_implementation.rs @@ -0,0 +1,12 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use super::{goto_shared::GotoTarget, 
AsyncRequestContext}; + +impl AsyncRequestContext { + pub(crate) fn goto_implementation( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::Implementation) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs new file mode 100644 index 00000000..77d8aefb --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs @@ -0,0 +1,82 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Location}; + +use super::AsyncRequestContext; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub(super) enum GotoTarget { + Definition, + TypeDefinition, + Declaration, + Implementation, +} + +impl AsyncRequestContext { + pub(super) fn goto_target( + &self, + params: &GotoDefinitionParams, + target: GotoTarget, + ) -> Option { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let lsp_pos = position.into(); + let semantic = self.documents.get_semantic_artifacts(&path); + + let result = match target { + GotoTarget::Definition | GotoTarget::TypeDefinition => { + handlers::goto_definition_with_semantic(&doc, lsp_pos, semantic.as_deref())? + } + GotoTarget::Declaration | GotoTarget::Implementation => { + handlers::goto_declaration_with_semantic(&doc, lsp_pos, semantic.as_deref())? 
+ } + }; + match result { + handlers::DefinitionResult::Local(range) => { + let range = if target == GotoTarget::Implementation { + Self::local_implementation_range(&doc, range).unwrap_or(range) + } else { + range + }; + Some(GotoDefinitionResponse::Scalar(Location { + uri: uri.clone(), + range, + })) + } + handlers::DefinitionResult::Import(import_path) => { + let resolved = self.resolve_import_from_graph(&path, &import_path)?; + let resolved_uri = resolved.to_uri().ok()?; + let range = self.document_root_expr_range(&resolved).unwrap_or_default(); + Some(GotoDefinitionResponse::Scalar(Location { + uri: resolved_uri, + range, + })) + } + handlers::DefinitionResult::ImportField { + path: import_path, + fields, + } => { + let resolved = self.resolve_import_from_graph(&path, &import_path)?; + let resolved_uri = resolved.to_uri().ok()?; + let locations = self.find_field_in_file(&resolved, &fields); + let range = if target == GotoTarget::Implementation { + locations + .map(|location| location.implementation) + .or_else(|| self.find_export_binding_in_file(&resolved, &fields)) + .or_else(|| self.document_root_expr_range(&resolved)) + .unwrap_or_default() + } else { + locations + .map(|location| location.declaration) + .or_else(|| self.find_export_binding_in_file(&resolved, &fields))? 
+ }; + Some(GotoDefinitionResponse::Scalar(Location { + uri: resolved_uri, + range, + })) + } + } + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_type_definition.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_type_definition.rs new file mode 100644 index 00000000..2867af79 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_type_definition.rs @@ -0,0 +1,12 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use super::{goto_shared::GotoTarget, AsyncRequestContext}; + +impl AsyncRequestContext { + pub(crate) fn goto_type_definition( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::TypeDefinition) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/hover.rs b/crates/jrsonnet-lsp/src/server/async_requests/hover.rs new file mode 100644 index 00000000..6e697a59 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/hover.rs @@ -0,0 +1,25 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{Hover, HoverParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn hover(&self, params: &HoverParams) -> Option { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let lsp_pos = position.into(); + let analysis = self.analyze_document(&path, &doc); + let import_field_type_resolver = |import_path: &str, fields: &[String]| { + self.resolve_import_field_type(&path, import_path, fields) + }; + handlers::hover_with_import_field_type( + &doc, + lsp_pos, + &analysis, + Some(&import_field_type_resolver), + ) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_graph_precision.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_graph_precision.rs new file mode 100644 index 
00000000..2ec0e0fd --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_graph_precision.rs @@ -0,0 +1,172 @@ +use std::sync::atomic::Ordering; + +use jrsonnet_lsp_document::{CanonicalPath, FileId}; +use jrsonnet_lsp_import::{ImportParseMode, ImportResolution}; +use rayon::prelude::*; +use tracing::debug; + +use super::AsyncRequestContext; +use crate::{ + analysis::tanka::effective_import_roots, + server::workspace_index::collect_workspace_files_for_roots, +}; + +impl AsyncRequestContext { + pub(super) fn ensure_workspace_discovered(&self) { + if self.workspace.discovery_done.load(Ordering::Acquire) { + return; + } + + let _guard = self.workspace.graph_materialization_lock.lock(); + if self.workspace.discovery_done.load(Ordering::Acquire) { + return; + } + if self.workspace.roots.is_empty() { + self.workspace.discovery_done.store(true, Ordering::Release); + return; + } + + let discovered_paths = collect_workspace_files_for_roots(&self.workspace.roots); + let discovered_files = discovered_paths + .into_iter() + .map(|path| self.documents.intern(&path)) + .collect::>(); + + { + let mut known = self.workspace.known_files.write(); + known.extend(discovered_files.iter().copied()); + } + { + let mut dirty = self.workspace.dirty_files.write(); + dirty.extend( + discovered_files + .iter() + .copied() + .filter(|file| !self.documents.is_open_file(*file)), + ); + } + + self.workspace.discovery_done.store(true, Ordering::Release); + debug!("Discovered {} workspace files", discovered_files.len()); + } + + pub(super) fn ensure_workspace_graph_materialized(&self) { + self.ensure_workspace_discovered(); + self.materialize_workspace_dirty_files(); + } + + pub(super) fn ensure_file_materialized(&self, path: &CanonicalPath) -> Option { + self.ensure_workspace_discovered(); + + let file = self.documents.intern(path); + self.workspace.known_files.write().insert(file); + + let is_tracked = self.import_graph.read().parse_mode(file).is_some(); + if !is_tracked { + if 
self.documents.is_open_file(file) { + self.reparse_files_precisely(&[file]); + } else { + self.workspace.dirty_files.write().insert(file); + } + } + + self.materialize_workspace_dirty_files(); + self.import_graph.read().file(path) + } + + pub(super) fn ensure_precise_transitive_importers(&self, path: &CanonicalPath) -> Vec { + self.ensure_workspace_graph_materialized(); + let Some(root_file) = self.ensure_file_materialized(path) else { + return Vec::new(); + }; + + let importers = self.import_graph.read().transitive_importers(root_file); + self.ensure_precise_import_graph_files(&importers); + self.import_graph.read().transitive_importers(root_file) + } + + fn materialize_workspace_dirty_files(&self) { + let _guard = self.workspace.graph_materialization_lock.lock(); + let dirty_files = { + let mut dirty = self.workspace.dirty_files.write(); + if dirty.is_empty() { + return; + } + dirty.drain().collect::>() + }; + + self.reparse_files_with_mode(&dirty_files, false); + } + + fn reparse_files_precisely(&self, files: &[FileId]) { + self.reparse_files_with_mode(files, true); + } + + fn reparse_files_with_mode(&self, files: &[FileId], precise_only: bool) { + if files.is_empty() { + return; + } + + let (jpath, resolve_paths_with_tanka) = { + let config = self.config.read(); + (config.jpath.clone(), config.resolve_paths_with_tanka) + }; + + let parsed = files + .par_iter() + .map(|file| { + let Some(path) = self.documents.path(*file) else { + return (*file, None); + }; + let Some(doc) = self.documents.get_document_file(*file) else { + return (*file, None); + }; + + let parse_mode = if precise_only || self.documents.is_open_file(*file) { + ImportParseMode::Precise + } else { + ImportParseMode::Approximate + }; + + let import_roots = effective_import_roots( + path.as_ref().as_path(), + &jpath, + resolve_paths_with_tanka, + ); + let import_resolution = ImportResolution::new(path.as_ref(), &import_roots); + let entries = match parse_mode { + ImportParseMode::Precise => 
import_resolution.parse_entries(&doc), + ImportParseMode::Approximate => { + import_resolution.parse_entries_approximate(&doc) + } + }; + (*file, Some((entries, parse_mode))) + }) + .collect::>(); + + let mut graph = self.import_graph.write(); + for (file, parsed_entry) in parsed { + if let Some((entries, parse_mode)) = parsed_entry { + graph.update_file_with_entries_mode(file, entries, parse_mode); + } else { + graph.remove_file(file); + } + } + } + + fn ensure_precise_import_graph_files(&self, files: &[FileId]) { + let to_upgrade = { + let graph = self.import_graph.read(); + files + .iter() + .copied() + .filter(|file| !graph.is_precise(*file)) + .collect::>() + }; + if to_upgrade.is_empty() { + return; + } + + self.reparse_files_precisely(&to_upgrade); + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/document.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/document.rs new file mode 100644 index 00000000..1c428a91 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/document.rs @@ -0,0 +1,73 @@ +use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, Document, LspRange}; +use jrsonnet_rowan_parser::AstNode; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(in crate::server::async_requests) fn load_document_for_path( + &self, + path: &CanonicalPath, + ) -> Option { + self.documents.get_document(path) + } + + pub(in crate::server::async_requests) fn document_root_expr_range( + &self, + path: &CanonicalPath, + ) -> Option { + let doc = self.load_document_for_path(path)?; + let expr = doc.ast().expr()?; + Some(to_lsp_range( + expr.syntax().text_range(), + doc.line_index(), + doc.text(), + )) + } + + pub(in crate::server::async_requests) fn local_implementation_range( + document: &Document, + declaration: lsp_types::Range, + ) -> Option { + use jrsonnet_rowan_parser::{ + nodes::{Bind, ForSpec, Param}, + AstNode, + }; + + let text = document.text(); + let line_index 
= document.line_index(); + let declaration_range = line_index.text_range(LspRange::from(declaration), text)?; + let ast = document.ast(); + let node = ast + .syntax() + .descendants() + .find(|candidate| candidate.text_range() == declaration_range)?; + + if let Some(bind) = node.ancestors().find_map(Bind::cast) { + let value_range = match bind { + Bind::BindDestruct(bind) => bind.value()?.syntax().text_range(), + Bind::BindFunction(bind) => bind.value()?.syntax().text_range(), + }; + return Some(to_lsp_range(value_range, line_index, text)); + } + + if let Some(param) = node.ancestors().find_map(Param::cast) { + let default_value = param.expr()?; + return Some(to_lsp_range( + default_value.syntax().text_range(), + line_index, + text, + )); + } + + if let Some(for_spec) = node.ancestors().find_map(ForSpec::cast) { + let source_expr = for_spec.expr()?; + return Some(to_lsp_range( + source_expr.syntax().text_range(), + line_index, + text, + )); + } + + None + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/mod.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/mod.rs new file mode 100644 index 00000000..40164bf5 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/mod.rs @@ -0,0 +1,10 @@ +mod document; +mod resolve; +mod symbols; +mod type_lookup; + +#[derive(Debug, Clone, Copy)] +pub(super) struct ImportedFieldLocations { + pub(super) declaration: lsp_types::Range, + pub(super) implementation: lsp_types::Range, +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs new file mode 100644 index 00000000..c1604572 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs @@ -0,0 +1,45 @@ +use jrsonnet_lsp_document::CanonicalPath; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(in crate::server::async_requests) fn 
resolve_import_from_graph( + &self, + from: &CanonicalPath, + import: &str, + ) -> Option { + let import_graph = self.import_graph.read(); + let from_file = import_graph.file(from)?; + import_graph + .resolved_import(from_file, import) + .and_then(|file| import_graph.path(file)) + .map(|path| path.as_ref().clone()) + } + + fn resolve_import_from_fs(from: &CanonicalPath, import: &str) -> Option { + let import_path = std::path::Path::new(import); + let candidate = if import_path.is_absolute() { + import_path.to_path_buf() + } else if import.starts_with("./") || import.starts_with("../") { + from.as_path().parent()?.join(import_path) + } else { + return None; + }; + + CanonicalPath::try_from_path(&candidate).ok() + } + + pub(in crate::server::async_requests) fn resolve_import_path( + &self, + from: &CanonicalPath, + import: &str, + ) -> Option { + if import.starts_with("./") || import.starts_with("../") || import.starts_with('/') { + return Self::resolve_import_from_fs(from, import) + .or_else(|| self.resolve_import_from_graph(from, import)); + } + + self.resolve_import_from_graph(from, import) + .or_else(|| Self::resolve_import_from_fs(from, import)) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/symbols.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/symbols.rs new file mode 100644 index 00000000..27a0e9c0 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/symbols.rs @@ -0,0 +1,143 @@ +use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath}; +use jrsonnet_lsp_handlers as handlers; +use jrsonnet_rowan_parser::{ + nodes::{FieldName, Member, ObjBody}, + AstNode, AstToken, SyntaxKind, +}; + +use super::{super::AsyncRequestContext, ImportedFieldLocations}; + +impl AsyncRequestContext { + pub(in crate::server::async_requests) fn find_export_binding_in_file( + &self, + path: &CanonicalPath, + fields: &[String], + ) -> Option { + let [field_name] = fields else { + return None; + }; + + let doc = 
self.load_document_for_path(path)?; + let text = doc.text(); + let line_index = doc.line_index(); + + doc.ast() + .syntax() + .descendants_with_tokens() + .filter_map(jrsonnet_rowan_parser::rowan::NodeOrToken::into_token) + .filter(|token| token.kind() == SyntaxKind::IDENT && token.text() == field_name) + .find_map(|token| { + let position = line_index.position(token.text_range().start().into(), text)?; + match handlers::goto_definition(&doc, position) { + Some(handlers::DefinitionResult::Local(range)) => Some(range), + Some( + handlers::DefinitionResult::Import(_) + | handlers::DefinitionResult::ImportField { .. }, + ) + | None => None, + } + }) + } + + /// For a field chain like `foo.bar`, this finds the `bar` field + /// inside the `foo` field of the top-level object. + pub(in crate::server::async_requests) fn find_field_in_file( + &self, + path: &CanonicalPath, + fields: &[String], + ) -> Option { + use jrsonnet_rowan_parser::nodes::ExprBase; + + let doc = self.load_document_for_path(path)?; + + let ast = doc.ast(); + let text = doc.text(); + let line_index = doc.line_index(); + let expr = ast.expr()?; + + let expr_base = expr.expr_base()?; + let ExprBase::ExprObject(obj) = expr_base else { + return None; + }; + let mut current_obj_body = obj.obj_body()?; + + for (i, field_name) in fields.iter().enumerate() { + let is_last = i == fields.len() - 1; + let ObjBody::ObjBodyMemberList(members) = ¤t_obj_body else { + return None; + }; + + let field_target = members.members().find_map(|member| match member { + Member::MemberFieldNormal(field) => { + let name_node = field.field_name()?; + let name = extract_field_name_string(&name_node)?; + if name != *field_name { + return None; + } + + let declaration = name_node.syntax().text_range(); + let value = field.expr()?; + let implementation = value.syntax().text_range(); + let next_body = value.expr_base().and_then(|base| { + let ExprBase::ExprObject(obj) = base else { + return None; + }; + obj.obj_body() + }); + + 
Some((declaration, implementation, next_body)) + } + Member::MemberFieldMethod(method) => { + let name_node = method.field_name()?; + let name = extract_field_name_string(&name_node)?; + if name != *field_name { + return None; + } + + let declaration = name_node.syntax().text_range(); + let implementation = method + .expr() + .map_or(declaration, |expr| expr.syntax().text_range()); + Some((declaration, implementation, None)) + } + Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => None, + })?; + + if is_last { + let declaration = to_lsp_range(field_target.0, line_index, text); + let implementation = to_lsp_range(field_target.1, line_index, text); + return Some(ImportedFieldLocations { + declaration, + implementation, + }); + } + + current_obj_body = field_target.2?; + } + + None + } +} + +fn extract_field_name_string(name: &FieldName) -> Option { + match name { + FieldName::FieldNameFixed(fixed) => { + if let Some(name_node) = fixed.id() { + if let Some(ident) = name_node.ident_lit() { + return Some(ident.text().to_string()); + } + } + if let Some(text) = fixed.text() { + let s = text.syntax().text(); + let name = s + .trim_start_matches('"') + .trim_start_matches('\'') + .trim_end_matches('"') + .trim_end_matches('\''); + return Some(name.to_string()); + } + None + } + FieldName::FieldNameDynamic(_) => None, + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/type_lookup.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/type_lookup.rs new file mode 100644 index 00000000..6e971c7f --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/type_lookup.rs @@ -0,0 +1,67 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_types::{Ty, TyData}; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(in crate::server::async_requests) fn resolve_import_field_type( + &self, + from: &CanonicalPath, + import_path: &str, + fields: &[String], + ) -> Option { + let resolved 
= self.resolve_import_path(from, import_path)?; + let doc = self.load_document_for_path(&resolved)?; + let analysis = self.analyze_document(&resolved, &doc); + let ty = Self::type_for_field_path(&analysis, analysis.document_type(), fields)?; + Some(analysis.display_for_hover(ty)) + } + + fn type_for_field_path( + analysis: &jrsonnet_lsp_inference::TypeAnalysis, + root_ty: Ty, + fields: &[String], + ) -> Option { + fields.iter().try_fold(root_ty, |ty, field| { + Self::type_for_field(analysis, ty, field) + }) + } + + fn type_for_field( + analysis: &jrsonnet_lsp_inference::TypeAnalysis, + ty: Ty, + field: &str, + ) -> Option { + match analysis.get_data(ty) { + TyData::Any => Some(Ty::ANY), + TyData::Object(obj) => obj + .get_field(field) + .map(|field_def| field_def.ty) + .or_else(|| obj.has_unknown.then_some(Ty::ANY)), + TyData::AttrsOf { value } => Some(value), + TyData::Union(types) => { + let variants: Vec<_> = types + .into_iter() + .filter_map(|variant| Self::type_for_field(analysis, variant, field)) + .collect(); + if variants.is_empty() { + None + } else { + Some(analysis.union(variants)) + } + } + TyData::Sum(types) => { + let variants: Vec<_> = types + .into_iter() + .filter_map(|variant| Self::type_for_field(analysis, variant, field)) + .collect(); + if variants.is_empty() { + None + } else { + Some(analysis.union(variants)) + } + } + _ => None, + } + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs b/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs new file mode 100644 index 00000000..963a04f0 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs @@ -0,0 +1,20 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{InlayHint, InlayHintParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn inlay_hints(&self, params: &InlayHintParams) -> Option> { + let uri = ¶ms.text_document.uri; + let path = 
CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let analysis = self.analyze_document(&path, &doc); + let config = self.config.read().inlay_hints; + let hints = handlers::inlay_hints_with_config(&doc, &analysis, params.range, &config); + if hints.is_empty() { + return None; + } + Some(hints) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/prepare_rename.rs b/crates/jrsonnet-lsp/src/server/async_requests/prepare_rename.rs new file mode 100644 index 00000000..06b251ed --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/prepare_rename.rs @@ -0,0 +1,21 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{PrepareRenameResponse, TextDocumentPositionParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn prepare_rename( + &self, + params: &TextDocumentPositionParams, + ) -> Option { + let uri = ¶ms.text_document.uri; + let position = params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let lsp_pos = position.into(); + + handlers::prepare_rename(&doc, lsp_pos) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/references.rs new file mode 100644 index 00000000..b8572b63 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/references.rs @@ -0,0 +1,229 @@ +use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, Document}; +use jrsonnet_lsp_handlers as handlers; +use jrsonnet_lsp_inference::SemanticArtifacts; +use jrsonnet_rowan_parser::{ + nodes::{ExprBase, ExprField}, + rowan::TextRange, + AstNode, +}; +use lsp_types::{Location, ReferenceParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn references(&self, params: &ReferenceParams) -> Option> { + let uri = ¶ms.text_document_position.text_document.uri; + let position = 
params.text_document_position.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get_document(&path)?; + let lsp_pos = position.into(); + let semantic = self.documents.get_semantic_artifacts(&path); + + let include_declaration = params.context.include_declaration; + let mut refs = handlers::find_references_with_semantic( + &doc, + lsp_pos, + uri, + include_declaration, + semantic.as_deref(), + ); + + let importers = self.ensure_precise_transitive_importers(&path); + + let importer_docs: Vec<_> = importers + .into_iter() + .filter_map(|file| { + let path = self.documents.path(file)?; + let doc = self.documents.get_document_file(file)?; + let semantic = self.documents.get_semantic_artifacts(path.as_ref()); + Some((path.as_ref().clone(), doc, semantic)) + }) + .collect(); + let importer_refs: Vec<_> = importer_docs + .iter() + .map(|(k, v, semantic)| (k, v, semantic.as_deref())) + .collect(); + + let cross_refs = { + let import_graph = self.import_graph.read(); + handlers::find_cross_file_references_with_semantic( + &doc, + &path, + lsp_pos, + semantic.as_deref(), + &importer_refs, + &import_graph, + ) + }; + refs.extend(cross_refs); + + if refs.is_empty() { + if let Some(import_member_refs) = self.references_for_import_member_use_site( + &doc, + &path, + lsp_pos, + include_declaration, + semantic.as_deref(), + ) { + refs.extend(import_member_refs); + } + } + + if refs.is_empty() { + return None; + } + Some(refs) + } + + fn references_for_import_member_use_site( + &self, + document: &Document, + path: &CanonicalPath, + position: jrsonnet_lsp_document::LspPosition, + include_declaration: bool, + semantic: Option<&SemanticArtifacts>, + ) -> Option> { + let handlers::DefinitionResult::ImportField { + path: import_path, + fields, + } = handlers::goto_definition_with_semantic(document, position, semantic)? 
+ else { + return None; + }; + + let target_path = self.resolve_import_path(path, &import_path)?; + let declaration = self + .find_field_in_file(&target_path, &fields) + .map(|locations| locations.declaration) + .or_else(|| self.find_export_binding_in_file(&target_path, &fields))?; + self.collect_import_member_references( + &target_path, + declaration, + &fields, + include_declaration, + ) + } + + pub(super) fn collect_import_member_references( + &self, + target_path: &CanonicalPath, + declaration: lsp_types::Range, + fields: &[String], + include_declaration: bool, + ) -> Option> { + let declaration_pos = declaration.start.into(); + + let target_doc = self.documents.get_document(target_path)?; + let target_uri = target_path.to_uri().ok()?; + let target_semantic = self.documents.get_semantic_artifacts(target_path); + let mut refs = handlers::find_references_with_semantic( + &target_doc, + declaration_pos, + &target_uri, + include_declaration, + target_semantic.as_deref(), + ); + + let importers = self.ensure_precise_transitive_importers(target_path); + let import_graph = self.import_graph.read(); + let Some(target_file) = import_graph.file(target_path) else { + return (!refs.is_empty()).then_some(refs); + }; + + for importer_file in importers { + let Some(importer_doc) = self.documents.get_document_file(importer_file) else { + continue; + }; + let Some(importer_path) = self.documents.path(importer_file) else { + continue; + }; + let Some(importer_graph_file) = import_graph.file(importer_path.as_ref()) else { + continue; + }; + + let mut binding_names: Vec = import_graph + .imports_of_target(importer_graph_file, target_file) + .into_iter() + .filter_map(|entry| entry.binding_name.clone()) + .collect(); + binding_names.sort(); + binding_names.dedup(); + + if binding_names.is_empty() { + continue; + } + + let Ok(importer_uri) = importer_path.as_ref().to_uri() else { + continue; + }; + + for binding_name in binding_names { + for range in + 
find_import_member_chain_references(&importer_doc, &binding_name, fields) + { + refs.push(Location { + uri: importer_uri.clone(), + range: to_lsp_range(range, importer_doc.line_index(), importer_doc.text()), + }); + } + } + } + drop(import_graph); + + let mut deduped = Vec::with_capacity(refs.len()); + for reference in refs { + if deduped.iter().any(|existing: &Location| { + existing.uri == reference.uri && existing.range == reference.range + }) { + continue; + } + deduped.push(reference); + } + + (!deduped.is_empty()).then_some(deduped) + } +} + +fn find_import_member_chain_references( + document: &Document, + binding_name: &str, + fields: &[String], +) -> Vec { + if fields.is_empty() { + return Vec::new(); + } + + document + .ast() + .syntax() + .descendants() + .filter_map(ExprField::cast) + .filter_map(|field| import_member_chain_data(&field)) + .filter_map(|(base, chain, range)| { + (base == binding_name && chain == fields).then_some(range) + }) + .collect() +} + +fn import_member_chain_data(field: &ExprField) -> Option<(String, Vec, TextRange)> { + let field_ident = field.field()?.ident_lit()?; + let leaf_range = field_ident.text_range(); + let mut chain_rev = vec![field_ident.text().to_string()]; + let mut current_base = field.base()?; + + loop { + match current_base.expr_base()? 
{
            // Walk one step down the field-access chain toward its root.
            ExprBase::ExprField(parent_field) => {
                let parent_ident = parent_field.field()?.ident_lit()?;
                chain_rev.push(parent_ident.text().to_string());
                current_base = parent_field.base()?;
            }
            // Reached the root variable: the chain is complete.
            ExprBase::ExprVar(var) => {
                let binding = var.name()?.ident_lit()?.text().to_string();
                chain_rev.reverse();
                return Some((binding, chain_rev, leaf_range));
            }
            // Any other base expression cannot anchor a rename chain.
            _ => return None,
        }
    }
}

// --- crates/jrsonnet-lsp/src/server/async_requests/rename.rs ---

use std::collections::HashMap;

use jrsonnet_lsp_document::{CanonicalPath, SymbolName};
use jrsonnet_lsp_handlers as handlers;
use lsp_types::{RenameParams, TextEdit, WorkspaceEdit};
use tracing::warn;

use super::AsyncRequestContext;

impl AsyncRequestContext {
    /// Handle `textDocument/rename`.
    ///
    /// Validates the requested new name, then tries the specialised
    /// import-member rename first and falls back to the generic
    /// cross-file rename handler.
    ///
    /// Returns `None` when the position does not name a renameable
    /// symbol or the new name is not a valid identifier.
    pub(crate) fn rename(&self, params: &RenameParams) -> Option<WorkspaceEdit> {
        let uri = &params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;
        let path = CanonicalPath::from_uri(uri).ok()?;
        let doc = self.documents.get(&path)?;

        // Reject syntactically invalid identifiers up front.
        let new_name = match SymbolName::new(&params.new_name) {
            Ok(name) => name,
            Err(err) => {
                warn!("rename rejected: {}", err);
                return None;
            }
        };

        let lsp_pos = position.into();
        let semantic = self.documents.get_semantic_artifacts(&path);
        if let Some(rename) = self.rename_from_import_member_use_site(
            &doc,
            &path,
            lsp_pos,
            &new_name,
            semantic.as_deref(),
        ) {
            return Some(rename);
        }

        // Generic path: make sure importer information is precise, then
        // delegate to the cross-file rename handler.
        self.ensure_precise_transitive_importers(&path);
        let import_graph = self.import_graph.read();
        handlers::rename_cross_file(
            &doc,
            lsp_pos,
            &new_name,
            uri,
            &path,
            &self.documents,
            &import_graph,
        )
    }

    /// Rename an imported member starting from one of its use sites.
    ///
    /// Resolves the member to its declaration in the imported file,
    /// collects all references (including the declaration), and turns
    /// them into one sorted, de-duplicated `WorkspaceEdit`.
    fn rename_from_import_member_use_site(
        &self,
        document: &jrsonnet_lsp_document::Document,
        path: &CanonicalPath,
        position: jrsonnet_lsp_document::LspPosition,
        new_name: &SymbolName,
        semantic: Option<&jrsonnet_lsp_inference::SemanticArtifacts>,
    ) -> Option<WorkspaceEdit> {
        let handlers::DefinitionResult::ImportField {
            path: import_path,
            fields,
        } = handlers::goto_definition_with_semantic(document, position, semantic)?
        else {
            return None;
        };

        let target_path = self.resolve_import_path(path, &import_path)?;
        let declaration = self
            .find_field_in_file(&target_path, &fields)
            .map(|locations| locations.declaration)
            .or_else(|| self.find_export_binding_in_file(&target_path, &fields))?;
        let refs =
            self.collect_import_member_references(&target_path, declaration, &fields, true)?;

        let mut changes: HashMap<lsp_types::Uri, Vec<TextEdit>> = HashMap::new();
        for reference in refs {
            changes.entry(reference.uri).or_default().push(TextEdit {
                range: reference.range,
                new_text: new_name.as_ref().to_string(),
            });
        }

        for edits in changes.values_mut() {
            // Deterministic order for clients. All edits share the same
            // new_text, so duplicate reference ranges would be byte-identical
            // edits — drop them so the WorkspaceEdit never contains
            // conflicting overlaps.
            edits.sort_unstable_by_key(|edit| {
                (
                    edit.range.start.line,
                    edit.range.start.character,
                    edit.range.end.line,
                    edit.range.end.character,
                )
            });
            edits.dedup();
        }

        Some(WorkspaceEdit {
            changes: Some(changes),
            document_changes: None,
            change_annotations: None,
        })
    }
}

// --- crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_full.rs ---

use jrsonnet_lsp_document::CanonicalPath;
use jrsonnet_lsp_handlers as handlers;
use lsp_types::{SemanticTokensParams, SemanticTokensResult};

use super::AsyncRequestContext;

impl AsyncRequestContext {
    /// Handle `textDocument/semanticTokens/full` for a whole document.
    pub(crate) fn semantic_tokens_full(
        &self,
        params: &SemanticTokensParams,
    ) -> Option<SemanticTokensResult> {
        let uri = &params.text_document.uri;
        let path = CanonicalPath::from_uri(uri).ok()?;
        let doc = self.documents.get(&path)?;

        Some(handlers::semantic_tokens(&doc).into())
    }
}

// --- crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_range.rs ---

use jrsonnet_lsp_document::CanonicalPath;
use jrsonnet_lsp_handlers as handlers;
use lsp_types::{SemanticTokensRangeParams, SemanticTokensRangeResult};

use super::AsyncRequestContext;

impl AsyncRequestContext {
    /// Handle `textDocument/semanticTokens/range` for a sub-range.
    pub(crate) fn semantic_tokens_range(
        &self,
        params: &SemanticTokensRangeParams,
    ) -> Option<SemanticTokensRangeResult> {
        let uri = &params.text_document.uri;
        let path = CanonicalPath::from_uri(uri).ok()?;
        let doc = self.documents.get(&path)?;

        Some(handlers::semantic_tokens_range(&doc, params.range).into())
    }
}

// --- crates/jrsonnet-lsp/src/server/async_requests/signature_help.rs ---

use jrsonnet_lsp_document::CanonicalPath;
use jrsonnet_lsp_handlers as handlers;
use lsp_types::{SignatureHelp, SignatureHelpParams};

use super::AsyncRequestContext;

impl AsyncRequestContext {
    /// Handle `textDocument/signatureHelp` at a cursor position.
    pub(crate) fn signature_help(&self, params: &SignatureHelpParams) -> Option<SignatureHelp> {
        let uri = &params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;
        let path = CanonicalPath::from_uri(uri).ok()?;
        let doc = self.documents.get(&path)?;

        let lsp_pos = position.into();

        handlers::signature_help(&doc, lsp_pos)
    }
}

// --- crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs ---
mode 100644 index 00000000..756357d0 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs @@ -0,0 +1,158 @@ +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{SymbolInformation, WorkspaceSymbolParams, WorkspaceSymbolResponse}; +use rayon::prelude::*; + +use super::{super::unique_files, AsyncRequestContext}; + +const MAX_WORKSPACE_SYMBOL_RESULTS: usize = 128; +type WorkspaceSymbolSortKey = (u8, usize, String, String, u32, u32, u32, u32); + +impl AsyncRequestContext { + pub(crate) fn workspace_symbol( + &self, + params: &WorkspaceSymbolParams, + ) -> Option { + let query = ¶ms.query; + self.ensure_workspace_discovered(); + + let files = { + let known_workspace_files = self.workspace.known_files.read(); + let graph_files = self.import_graph.read().all_files().collect::>(); + unique_files( + known_workspace_files + .iter() + .copied() + .chain(graph_files) + .chain(self.documents.open_files()), + ) + }; + + let all_symbols: Vec = files + .into_par_iter() + .flat_map(|file| { + let Some(path) = self.documents.path(file) else { + return Vec::new(); + }; + let Some(doc) = self.documents.get_document_file(file) else { + return Vec::new(); + }; + let Ok(uri) = path.to_uri() else { + return Vec::new(); + }; + handlers::workspace_symbols_for_document(&doc, &uri, query) + }) + .collect(); + + let query_lower = query.to_lowercase(); + let all_symbols = rank_workspace_symbols(all_symbols, &query_lower); + + if all_symbols.is_empty() { + return None; + } + Some(WorkspaceSymbolResponse::Flat(all_symbols)) + } +} + +fn rank_workspace_symbols( + all_symbols: Vec, + query_lower: &str, +) -> Vec { + let ranked_symbols: Vec<(WorkspaceSymbolSortKey, SymbolInformation)> = all_symbols + .into_iter() + .map(|symbol| (workspace_symbol_sort_key(&symbol, query_lower), symbol)) + .collect(); + + take_top_ranked(ranked_symbols, MAX_WORKSPACE_SYMBOL_RESULTS) + .into_iter() + .map(|(_, symbol)| symbol) + .collect() +} + +fn take_top_ranked(mut ranked: 
Vec<(K, V)>, max_results: usize) -> Vec<(K, V)> { + if max_results == 0 { + return Vec::new(); + } + if ranked.len() > max_results { + let nth = max_results - 1; + ranked.select_nth_unstable_by(nth, |(left, _), (right, _)| left.cmp(right)); + ranked.truncate(max_results); + } + ranked.sort_unstable_by(|(left, _), (right, _)| left.cmp(right)); + ranked +} + +fn workspace_symbol_sort_key( + symbol: &SymbolInformation, + query_lower: &str, +) -> WorkspaceSymbolSortKey { + let name_lower = symbol.name.to_lowercase(); + ( + workspace_symbol_match_rank(&name_lower, query_lower), + name_lower.len(), + name_lower, + symbol.location.uri.as_str().to_string(), + symbol.location.range.start.line, + symbol.location.range.start.character, + symbol.location.range.end.line, + symbol.location.range.end.character, + ) +} + +fn workspace_symbol_match_rank(name_lower: &str, query_lower: &str) -> u8 { + if query_lower.is_empty() { + return 0; + } + if name_lower == query_lower { + return 0; + } + if name_lower.starts_with(query_lower) { + return 1; + } + 2 +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_take_top_ranked_orders_by_key() { + let ranked = take_top_ranked( + vec![ + ((2u8, 0usize), "tail"), + ((1, 1), "prefix_b"), + ((1, 0), "prefix_a"), + ((0, 0), "exact"), + ], + 4, + ); + + let values: Vec<_> = ranked.into_iter().map(|(_, value)| value).collect(); + assert_eq!(values, vec!["exact", "prefix_a", "prefix_b", "tail"]); + } + + #[test] + fn test_take_top_ranked_truncates_to_limit() { + let ranked = take_top_ranked( + (0..(MAX_WORKSPACE_SYMBOL_RESULTS + 5)) + .map(|i| ((1u8, i), i)) + .collect(), + MAX_WORKSPACE_SYMBOL_RESULTS, + ); + + assert_eq!(ranked.len(), MAX_WORKSPACE_SYMBOL_RESULTS); + assert_eq!(ranked.first().map(|((_, i), _)| *i), Some(0)); + assert_eq!( + ranked.last().map(|((_, i), _)| *i), + Some(MAX_WORKSPACE_SYMBOL_RESULTS - 1) + ); + } + + #[test] + fn test_workspace_symbol_match_rank() { + assert_eq!(workspace_symbol_match_rank("foo", 
""), 0); + assert_eq!(workspace_symbol_match_rank("foo", "foo"), 0); + assert_eq!(workspace_symbol_match_rank("foobar", "foo"), 1); + assert_eq!(workspace_symbol_match_rank("barfoo", "foo"), 2); + } +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/eval_expression.rs b/crates/jrsonnet-lsp/src/server/custom_operations/eval_expression.rs new file mode 100644 index 00000000..adc1462e --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/eval_expression.rs @@ -0,0 +1,45 @@ +use anyhow::Context as _; +use lsp_types::TextDocumentIdentifier; +use serde::Deserialize; + +use super::operation_spec::OperationSpec; +use crate::server::async_requests::AsyncRequestContext; + +const CUSTOM_METHOD: &str = "jrsonnet/evalExpression"; + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct EvalExpressionParams { + expression: String, + #[serde(default)] + base_document: Option, +} + +pub(super) const OPERATION: OperationSpec = OperationSpec { + custom_method: CUSTOM_METHOD, + execute_command: None, + code_lens: None, + handle_custom_request, + handle_execute_command: None, +}; + +fn handle_custom_request( + context: &AsyncRequestContext, + params: serde_json::Value, +) -> anyhow::Result { + let params: EvalExpressionParams = serde_json::from_value(params).context( + "invalid params for jrsonnet/evalExpression: expected expression and optional baseDocument.uri", + )?; + let base_uri = params.base_document.as_ref().map(|doc| doc.uri.as_str()); + Ok(context.execute_eval_expression(¶ms.expression, base_uri)) +} + +#[cfg(test)] +mod tests { + use super::CUSTOM_METHOD; + + #[test] + fn operation_id_is_stable() { + assert_eq!(CUSTOM_METHOD, "jrsonnet/evalExpression"); + } +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs b/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs new file mode 100644 index 00000000..fac08a30 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs @@ 
-0,0 +1,131 @@ +use anyhow::Context as _; +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use lsp_types::{CodeLens, Command, Position, Range, TextDocumentIdentifier, Uri}; +use serde::Deserialize; + +use super::operation_spec::OperationSpec; +use crate::server::async_requests::AsyncRequestContext; + +const CUSTOM_METHOD: &str = "jrsonnet/evalFile"; +const EXECUTE_COMMAND: &str = "jrsonnet.evalFile"; + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct EvalFileParams { + text_document: TextDocumentIdentifier, +} + +pub(super) const OPERATION: OperationSpec = OperationSpec { + custom_method: CUSTOM_METHOD, + execute_command: Some(EXECUTE_COMMAND), + code_lens: Some(code_lenses), + handle_custom_request, + handle_execute_command: Some(handle_execute_command), +}; + +fn handle_custom_request( + context: &AsyncRequestContext, + params: serde_json::Value, +) -> anyhow::Result { + let params: EvalFileParams = serde_json::from_value(params) + .context("invalid params for jrsonnet/evalFile: expected textDocument.uri")?; + context + .execute_eval_file(params.text_document.uri.as_str()) + .context("could not evaluate requested file") +} + +fn handle_execute_command( + context: &AsyncRequestContext, + args: &[serde_json::Value], +) -> Option { + let uri = args.first()?.as_str()?; + context.execute_eval_file(uri) +} + +fn code_lenses( + _context: &AsyncRequestContext, + _path: &CanonicalPath, + document: &Document, + uri: &Uri, +) -> Vec { + build_code_lenses(document, uri) +} + +fn build_code_lenses(document: &Document, uri: &Uri) -> Vec { + if document.text().trim().is_empty() || document.ast().expr().is_none() { + return Vec::new(); + } + + vec![CodeLens { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 0, + }, + }, + command: Some(Command { + title: "Evaluate".to_string(), + command: EXECUTE_COMMAND.to_string(), + arguments: Some(vec![serde_json::json!(uri.to_string())]), + }), + 
data: None, + }] +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + + use super::{build_code_lenses, CUSTOM_METHOD, EXECUTE_COMMAND}; + + #[test] + fn operation_ids_are_stable() { + assert_eq!(CUSTOM_METHOD, "jrsonnet/evalFile"); + assert_eq!(EXECUTE_COMMAND, "jrsonnet.evalFile"); + } + + #[test] + fn eval_file_lens_present_for_root_expression() { + let uri: lsp_types::Uri = "file:///test/main.jsonnet" + .parse() + .expect("expected success"); + let doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + let lenses = build_code_lenses(&doc, &uri); + + assert_eq!( + lenses, + vec![lsp_types::CodeLens { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 0, + }, + end: lsp_types::Position { + line: 0, + character: 0, + }, + }, + command: Some(lsp_types::Command { + title: "Evaluate".to_string(), + command: EXECUTE_COMMAND.to_string(), + arguments: Some(vec![serde_json::json!(uri.to_string())]), + }), + data: None, + }] + ); + } + + #[test] + fn eval_file_lens_absent_without_root_expression() { + let uri: lsp_types::Uri = "file:///test/main.jsonnet" + .parse() + .expect("expected success"); + let doc = Document::new(String::new(), DocVersion::new(1)); + let lenses = build_code_lenses(&doc, &uri); + assert!(lenses.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/find_transitive_importers.rs b/crates/jrsonnet-lsp/src/server/custom_operations/find_transitive_importers.rs new file mode 100644 index 00000000..4ea06392 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/find_transitive_importers.rs @@ -0,0 +1,44 @@ +use anyhow::Context as _; +use lsp_types::TextDocumentIdentifier; +use serde::Deserialize; + +use super::operation_spec::OperationSpec; +use crate::server::async_requests::AsyncRequestContext; + +const CUSTOM_METHOD: &str = "jrsonnet/findTransitiveImporters"; + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct 
FindTransitiveImportersParams { + text_document: TextDocumentIdentifier, +} + +pub(super) const OPERATION: OperationSpec = OperationSpec { + custom_method: CUSTOM_METHOD, + execute_command: None, + code_lens: None, + handle_custom_request, + handle_execute_command: None, +}; + +fn handle_custom_request( + context: &AsyncRequestContext, + params: serde_json::Value, +) -> anyhow::Result { + let params: FindTransitiveImportersParams = serde_json::from_value(params).context( + "invalid params for jrsonnet/findTransitiveImporters: expected textDocument.uri", + )?; + context + .execute_find_transitive_importers(params.text_document.uri.as_str()) + .context("could not compute transitive importers") +} + +#[cfg(test)] +mod tests { + use super::CUSTOM_METHOD; + + #[test] + fn operation_id_is_stable() { + assert_eq!(CUSTOM_METHOD, "jrsonnet/findTransitiveImporters"); + } +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs b/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs new file mode 100644 index 00000000..eb39d63c --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs @@ -0,0 +1,10 @@ +mod eval_expression; +mod eval_file; +mod find_transitive_importers; +mod operation_spec; +mod registry; + +pub(super) use registry::{ + execute_command_ids, extend_code_lenses, operation_for_custom_method, + operation_for_execute_command, +}; diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/operation_spec.rs b/crates/jrsonnet-lsp/src/server/custom_operations/operation_spec.rs new file mode 100644 index 00000000..b6aab92b --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/operation_spec.rs @@ -0,0 +1,19 @@ +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use lsp_types::{CodeLens, Uri}; + +use super::super::async_requests::AsyncRequestContext; + +pub(in crate::server) type CodeLensProvider = + fn(&AsyncRequestContext, &CanonicalPath, &Document, &Uri) -> Vec; +pub(in crate::server) type CustomRequestHandler 
= + fn(&AsyncRequestContext, serde_json::Value) -> anyhow::Result; +pub(in crate::server) type ExecuteCommandHandler = + fn(&AsyncRequestContext, &[serde_json::Value]) -> Option; + +pub(in crate::server) struct OperationSpec { + pub(in crate::server) custom_method: &'static str, + pub(in crate::server) execute_command: Option<&'static str>, + pub(in crate::server) code_lens: Option, + pub(in crate::server) handle_custom_request: CustomRequestHandler, + pub(in crate::server) handle_execute_command: Option, +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs b/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs new file mode 100644 index 00000000..ac10b443 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs @@ -0,0 +1,95 @@ +use std::{collections::HashMap, sync::LazyLock}; + +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use lsp_types::{CodeLens, Uri}; + +use super::{ + super::async_requests::AsyncRequestContext, eval_expression, eval_file, + find_transitive_importers, operation_spec::OperationSpec, +}; + +const OPERATIONS: &[OperationSpec] = &[ + eval_file::OPERATION, + eval_expression::OPERATION, + find_transitive_importers::OPERATION, +]; + +static OPERATIONS_BY_CUSTOM_METHOD: LazyLock> = + LazyLock::new(|| { + OPERATIONS + .iter() + .map(|operation| (operation.custom_method, operation)) + .collect() + }); + +static OPERATIONS_BY_EXECUTE_COMMAND: LazyLock> = + LazyLock::new(|| { + OPERATIONS + .iter() + .filter_map(|operation| { + operation + .execute_command + .map(|command| (command, operation)) + }) + .collect() + }); + +pub(in crate::server) fn operation_for_custom_method( + method: &str, +) -> Option<&'static OperationSpec> { + OPERATIONS_BY_CUSTOM_METHOD.get(method).copied() +} + +pub(in crate::server) fn operation_for_execute_command( + command: &str, +) -> Option<&'static OperationSpec> { + OPERATIONS_BY_EXECUTE_COMMAND.get(command).copied() +} + +pub(in crate::server) fn 
execute_command_ids() -> Vec { + OPERATIONS + .iter() + .filter_map(|operation| operation.execute_command) + .map(ToString::to_string) + .collect() +} + +pub(in crate::server) fn extend_code_lenses( + context: &AsyncRequestContext, + path: &CanonicalPath, + document: &Document, + uri: &Uri, + lenses: &mut Vec, +) { + for operation in OPERATIONS { + if let Some(code_lens) = operation.code_lens { + lenses.extend(code_lens(context, path, document, uri)); + } + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashSet; + + use super::OPERATIONS; + + #[test] + fn operations_have_unique_custom_method_ids() { + let unique: HashSet<_> = OPERATIONS + .iter() + .map(|operation| operation.custom_method) + .collect(); + assert_eq!(unique.len(), OPERATIONS.len()); + } + + #[test] + fn operations_have_unique_execute_command_ids() { + let command_ids: Vec<_> = OPERATIONS + .iter() + .filter_map(|operation| operation.execute_command) + .collect(); + let unique: HashSet<_> = command_ids.iter().copied().collect(); + assert_eq!(unique.len(), command_ids.len()); + } +} diff --git a/crates/jrsonnet-lsp/src/server/event_loop.rs b/crates/jrsonnet-lsp/src/server/event_loop.rs new file mode 100644 index 00000000..d82936e1 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/event_loop.rs @@ -0,0 +1,125 @@ +use anyhow::Result; +use crossbeam_channel::select; +use lsp_server::{Message, Notification, Request}; +use lsp_types::notification::PublishDiagnostics; +use tracing::{debug, error, warn}; + +use super::Server; +use crate::protocol::request_error::RequestError; + +impl Server { + /// Main message loop. + /// + /// Uses crossbeam select! to handle both LSP messages and async diagnostics results. + pub(super) fn main_loop(&mut self) -> Result<()> { + loop { + // Use select! to wait on either LSP messages or diagnostics results. + // We receive first and then process to avoid borrow conflicts. 
+ enum SelectResult { + LspMessage(Result), + DiagnosticsResult( + Result< + crate::async_diagnostics::DiagnosticsResult, + crossbeam_channel::RecvError, + >, + ), + AsyncRequestResponse(Result), + } + + let result = { + let lsp_receiver = &self.connection.receiver; + let diag_receiver = self.diagnostics.results(); + let request_receiver = &self.request_response_receiver; + + select! { + recv(lsp_receiver) -> msg => SelectResult::LspMessage(msg), + recv(diag_receiver) -> result => SelectResult::DiagnosticsResult(result), + recv(request_receiver) -> response => SelectResult::AsyncRequestResponse(response), + } + }; + + match result { + SelectResult::LspMessage(Ok(msg)) => { + if self.handle_message(msg)? { + break; // Exit notification received + } + } + SelectResult::LspMessage(Err(e)) => { + error!("Error receiving message: {}", e); + break; + } + SelectResult::DiagnosticsResult(Ok(result)) => { + // Send the completed diagnostics to the client + self.send_notification::(result.params)?; + } + SelectResult::DiagnosticsResult(Err(_)) => { + // Diagnostics channel closed, that's fine + debug!("Diagnostics channel closed"); + } + SelectResult::AsyncRequestResponse(Ok(response)) => { + if !self.inflight_requests.send_inflight_response(response)? { + debug!("Dropping async response for non-pending request"); + } + } + SelectResult::AsyncRequestResponse(Err(_)) => { + debug!("Async request response channel closed"); + } + } + } + + Ok(()) + } + + /// Handle a single LSP message. + /// + /// Returns true if exit notification was received. + fn handle_message(&mut self, msg: Message) -> Result { + match msg { + Message::Request(req) => { + if self.shutdown_requested { + // After shutdown, only respond with errors. + let Request { id, method, .. 
} = req; + let request = self.inflight_requests.begin_unknown(id, method.as_str()); + let _ = self + .inflight_requests + .send_unknown_error(request, RequestError::ServerShuttingDown)?; + } else { + self.handle_request(req)?; + } + Ok(false) + } + Message::Response(resp) => { + if let Some(meta) = self.inflight_requests.complete_outgoing(resp.id.clone()) { + if let Some(error) = &resp.error { + warn!( + "Outgoing request {} ({}) failed: {} ({})", + resp.id, meta.method, error.message, error.code + ); + } else { + debug!( + "Received response for outgoing request {} ({})", + resp.id, meta.method + ); + } + } else { + debug!("Received untracked response: {:?}", resp.id); + } + Ok(false) + } + Message::Notification(notif) => self.handle_notification(notif), + } + } +} + +impl Server { + /// Send a notification to the client. + pub(super) fn send_notification( + &self, + params: N::Params, + ) -> Result<()> { + let params = serde_json::to_value(params)?; + let notif = Notification::new(N::METHOD.to_string(), params); + self.connection.sender.send(Message::Notification(notif))?; + Ok(()) + } +} diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs new file mode 100644 index 00000000..2e0ce991 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -0,0 +1,107 @@ +use super::*; + +impl Server { + /// Update the import graph for a document. + /// + /// Parses the document's import statements and updates the graph + /// so that cross-file references can be found efficiently. 
+ pub(super) fn update_import_graph(&self, file: FileId) { + Self::update_import_graph_for_file_mode( + &self.documents, + &self.import_graph, + &self.config, + file, + jrsonnet_lsp_import::ImportParseMode::Precise, + ); + } + + pub(super) fn unresolved_files_for_reindex(&self) -> Vec { + self.import_graph.read().files_with_unresolved_imports() + } + + pub(super) fn update_import_graph_for_file_mode( + documents: &SharedDocumentManager, + import_graph: &Arc>, + config: &SharedConfig, + file: FileId, + parse_mode: jrsonnet_lsp_import::ImportParseMode, + ) { + let entries = { + let Some(path) = documents.path(file) else { + return; + }; + let Some(doc) = documents.get_document_file(file) else { + // File no longer exists or cannot be read. + import_graph.write().remove_file(file); + return; + }; + + let config = config.read(); + let import_roots = effective_import_roots( + path.as_ref().as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + drop(config); + + // Parse imports OUTSIDE the graph lock to minimize lock hold time. + let import_resolution = ImportResolution::new(path.as_ref(), &import_roots); + match parse_mode { + jrsonnet_lsp_import::ImportParseMode::Precise => { + import_resolution.parse_entries(&doc) + } + jrsonnet_lsp_import::ImportParseMode::Approximate => { + import_resolution.parse_entries_approximate(&doc) + } + } + }; + + // Acquire the write lock and perform the graph update; entry file-id + // resolution is handled defensively by the graph update path. + import_graph + .write() + .update_file_with_entries_mode(file, entries, parse_mode); + } + + /// Schedule diagnostics for currently-open files that import `file`. 
+ pub(super) fn schedule_diagnostics_for_open_importers(&self, file: FileId) { + let importers = self.import_graph.read().transitive_importers(file); + for importer in importers { + if !self.documents.is_open_file(importer) { + continue; + } + + self.schedule_diagnostics_file(importer); + } + } + + /// Schedule diagnostics computation for a file. + pub(super) fn schedule_diagnostics_file(&self, file: FileId) { + let request = self.documents.path(file).and_then(|path| { + let doc = self.documents.get(path.as_ref())?; + let (enable_lint, import_roots) = { + let config = self.config.read(); + ( + config.lint_diagnostics_enabled(), + effective_import_roots( + path.as_ref().as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ), + ) + }; + Some(( + doc.text().to_string(), + doc.version(), + enable_lint, + import_roots, + )) + }); + let Some((text, version, enable_lint, import_roots)) = request else { + return; + }; + + self.diagnostics + .schedule(file, text, version, enable_lint, import_roots); + } +} diff --git a/crates/jrsonnet-lsp/src/server/initialization.rs b/crates/jrsonnet-lsp/src/server/initialization.rs new file mode 100644 index 00000000..80bebf95 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/initialization.rs @@ -0,0 +1,125 @@ +use anyhow::Context; +use jrsonnet_lsp_handlers as handlers; +use lsp_server::{Message, RequestId}; +use lsp_types::{ + CodeActionKind, CodeActionOptions, CodeActionProviderCapability, CodeLensOptions, + CompletionOptions, ExecuteCommandOptions, HoverProviderCapability, InitializeParams, + InitializeResult, OneOf, SemanticTokensFullOptions, SemanticTokensOptions, + SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelpOptions, + TextDocumentSyncCapability, TextDocumentSyncKind, WorkDoneProgressOptions, +}; +use tracing::info; + +use super::{custom_operations, InitializeRoots, Server}; + +impl Server { + /// Handle the initialize request. 
+ pub(super) fn initialize( + &self, + ) -> anyhow::Result<(RequestId, InitializeParams, InitializeRoots)> { + let msg = self + .connection + .receiver + .recv() + .context("Failed to receive initialize request")?; + + match msg { + Message::Request(req) if req.method == "initialize" => { + let init_roots: InitializeRoots = + serde_json::from_value(req.params.clone()).unwrap_or_default(); + let params: InitializeParams = serde_json::from_value(req.params)?; + info!("Initialize request from: {:?}", params.client_info); + Ok((req.id, params, init_roots)) + } + _ => anyhow::bail!("Expected initialize request, got: {msg:?}"), + } + } + + /// Get the server capabilities. + pub(super) fn server_capabilities() -> ServerCapabilities { + ServerCapabilities { + text_document_sync: Some(TextDocumentSyncCapability::Options( + lsp_types::TextDocumentSyncOptions { + open_close: Some(true), + change: Some(TextDocumentSyncKind::INCREMENTAL), + will_save: None, + will_save_wait_until: None, + save: Some(lsp_types::TextDocumentSyncSaveOptions::Supported(true)), + }, + )), + document_symbol_provider: Some(OneOf::Left(true)), + definition_provider: Some(OneOf::Left(true)), + declaration_provider: Some(lsp_types::DeclarationCapability::Simple(true)), + implementation_provider: Some(lsp_types::ImplementationProviderCapability::Simple( + true, + )), + type_definition_provider: Some(lsp_types::TypeDefinitionProviderCapability::Simple( + true, + )), + hover_provider: Some(HoverProviderCapability::Simple(true)), + document_highlight_provider: Some(OneOf::Left(true)), + inlay_hint_provider: Some(OneOf::Left(true)), + code_action_provider: Some(CodeActionProviderCapability::Options(CodeActionOptions { + code_action_kinds: Some(vec![ + CodeActionKind::QUICKFIX, + CodeActionKind::SOURCE_FIX_ALL, + ]), + work_done_progress_options: WorkDoneProgressOptions::default(), + resolve_provider: Some(false), + })), + completion_provider: Some(CompletionOptions { + trigger_characters: 
Some(vec![".".to_string()]), + ..Default::default() + }), + signature_help_provider: Some(SignatureHelpOptions { + trigger_characters: Some(vec!["(".to_string(), ",".to_string()]), + retrigger_characters: None, + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + document_formatting_provider: Some(OneOf::Left(true)), + document_range_formatting_provider: Some(OneOf::Left(true)), + references_provider: Some(OneOf::Left(true)), + workspace_symbol_provider: Some(OneOf::Left(true)), + rename_provider: Some(OneOf::Right(lsp_types::RenameOptions { + prepare_provider: Some(true), + work_done_progress_options: WorkDoneProgressOptions::default(), + })), + semantic_tokens_provider: Some( + SemanticTokensServerCapabilities::SemanticTokensOptions(SemanticTokensOptions { + legend: handlers::semantic_tokens_legend(), + full: Some(SemanticTokensFullOptions::Bool(true)), + range: Some(true), + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + ), + execute_command_provider: Some(ExecuteCommandOptions { + commands: custom_operations::execute_command_ids(), + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + code_lens_provider: Some(CodeLensOptions { + resolve_provider: Some(true), + }), + ..Default::default() + } + } + + pub(super) fn supports_inlay_hint_refresh(params: &InitializeParams) -> bool { + params + .capabilities + .workspace + .as_ref() + .and_then(|workspace| workspace.inlay_hint.as_ref()) + .and_then(|capabilities| capabilities.refresh_support) + .unwrap_or(false) + } + + pub(super) fn initialize_result() -> InitializeResult { + InitializeResult { + capabilities: Self::server_capabilities(), + server_info: Some(ServerInfo { + name: "jrsonnet-lsp".to_string(), + version: Some(env!("CARGO_PKG_VERSION").to_string()), + }), + } + } +} diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs new file mode 100644 index 00000000..fae043cc --- /dev/null +++ 
b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -0,0 +1,355 @@ +use lsp_types::{ + notification::{ + Cancel, DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, + DidCloseTextDocument, DidOpenTextDocument, DidSaveTextDocument, Notification as _, + }, + request::InlayHintRefreshRequest, + DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, + FileChangeType, +}; + +use super::*; + +impl Server { + fn request_inlay_hint_refresh_if_supported(&mut self) { + if !self.client_supports_inlay_hint_refresh { + return; + } + + if let Err(error) = self + .inflight_requests + .send_outgoing_request::(()) + { + warn!("Failed to request inlay-hint refresh: {error:#}"); + } + } + + /// Handle an incoming notification. + /// + /// Returns true if exit notification was received. + pub(super) fn handle_notification(&mut self, notif: Notification) -> Result { + debug!("Handling notification: {}", notif.method); + + match notif.method.as_str() { + Cancel::METHOD => { + let params: lsp_types::CancelParams = serde_json::from_value(notif.params)?; + self.on_cancel_request(params)?; + } + DidOpenTextDocument::METHOD => { + let params: DidOpenTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_open(params); + } + DidChangeTextDocument::METHOD => { + let params: DidChangeTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_change(params); + } + DidCloseTextDocument::METHOD => { + let params: DidCloseTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_close(¶ms)?; + } + DidSaveTextDocument::METHOD => { + let params: DidSaveTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_save(params); + } + DidChangeConfiguration::METHOD => { + let params: DidChangeConfigurationParams = serde_json::from_value(notif.params)?; + self.on_did_change_configuration(params); + } + 
DidChangeWatchedFiles::METHOD => { + let params: DidChangeWatchedFilesParams = serde_json::from_value(notif.params)?; + self.on_did_change_watched_files(params); + } + "exit" => { + info!("Exit notification received"); + return Ok(true); + } + _ => { + debug!("Unhandled notification: {}", notif.method); + } + } + + Ok(false) + } + + pub(super) fn on_cancel_request(&mut self, params: lsp_types::CancelParams) -> Result<()> { + let request_id = Self::request_id_from_number_or_string(params.id); + if !self.inflight_requests.cancel_request(request_id.clone())? { + debug!("Ignoring cancel request for non-pending id {}", request_id); + } + Ok(()) + } + + /// Handle textDocument/didOpen notification. + pub(super) fn on_did_open(&self, params: DidOpenTextDocumentParams) { + let uri = ¶ms.text_document.uri; + info!("Document opened: {}", uri.as_str()); + + let Ok(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + + let text = params.text_document.text; + let version = DocVersion::new(params.text_document.version); + let file = self.documents.intern(&path); + self.register_known_workspace_file(file); + + self.documents.open(path.clone(), text, version); + self.documents.refresh_semantic_artifacts(&path); + + // Invalidate type cache for this file and all files that depend on it + self.invalidate_type_cache_with_dependents(file); + + // Update import graph + self.update_import_graph(file); + self.mark_workspace_file_clean(file); + + // Publish diagnostics + self.schedule_diagnostics_file(file); + } + + /// Handle textDocument/didChange notification. 
+ pub(super) fn on_did_change(&self, params: DidChangeTextDocumentParams) { + let uri = ¶ms.text_document.uri; + debug!("Document changed: {}", uri.as_str()); + + let Ok(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + + let version = DocVersion::new(params.text_document.version); + let file = self.documents.intern(&path); + self.register_known_workspace_file(file); + + // Process each change (INCREMENTAL sync may send multiple changes) + for change in params.content_changes { + let success = if let Some(range) = change.range { + // Incremental change: apply the range-based edit + self.documents + .apply_incremental_change(&path, range, &change.text, version) + } else { + // Full change: no range means full document replacement + self.documents.update(&path, change.text, version) + }; + + if !success { + warn!("Failed to apply change to document: {}", uri.as_str()); + return; + } + } + + self.documents.refresh_semantic_artifacts(&path); + + // Invalidate type cache for this file and all files that depend on it + self.invalidate_type_cache_with_dependents(file); + + // Update import graph (imports may have changed) + self.update_import_graph(file); + self.mark_workspace_file_clean(file); + + // Publish diagnostics + self.schedule_diagnostics_file(file); + } + + /// Handle textDocument/didClose notification. 
+ pub(super) fn on_did_close(&self, params: &DidCloseTextDocumentParams) -> Result<()> { + let uri = ¶ms.text_document.uri; + info!("Document closed: {}", uri.as_str()); + + let Ok(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return Ok(()); + }; + let file = self.documents.intern(&path); + self.register_known_workspace_file(file); + + self.documents.close(&path); + + // Invalidate type cache for this file and all files that depend on it + // (dependents may have cached types based on this file's exports) + self.invalidate_type_cache_with_dependents(file); + + // Keep import graph semantics for closed documents by re-indexing from + // cached/disk content instead of dropping the file node. + self.update_import_graph(file); + self.mark_workspace_file_clean(file); + self.schedule_diagnostics_for_open_importers(file); + + // Clear diagnostics for closed document + self.send_notification::(lsp_types::PublishDiagnosticsParams { + uri: uri.clone(), + diagnostics: vec![], + version: None, + })?; + + Ok(()) + } + + /// Handle textDocument/didSave notification. 
+ pub(super) fn on_did_save(&self, params: DidSaveTextDocumentParams) { + let uri = ¶ms.text_document.uri; + debug!("Document saved: {}", uri.as_str()); + + let Ok(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + let file = self.documents.intern(&path); + self.register_known_workspace_file(file); + + if let Some(text) = params.text { + let Some(doc) = self.documents.get(&path) else { + return; + }; + let version = doc.version(); + drop(doc); + + if !self.documents.update(&path, text, version) { + warn!("Failed to update saved document contents: {}", uri.as_str()); + return; + } + } + + self.invalidate_type_cache_with_dependents(file); + self.update_import_graph(file); + self.mark_workspace_file_clean(file); + if self.documents.is_open_file(file) { + self.schedule_diagnostics_file(file); + } + self.schedule_diagnostics_for_open_importers(file); + } + + /// Handle workspace/didChangeConfiguration notification. + pub(super) fn on_did_change_configuration(&mut self, params: DidChangeConfigurationParams) { + info!("Configuration changed"); + + // The settings can come in different formats depending on the client + // VS Code sends settings under a "jsonnet" key, others may send flat settings + let settings = if let Some(jsonnet_settings) = params + .settings + .as_object() + .and_then(|o| o.get("jsonnet")) + .or_else(|| { + params + .settings + .as_object() + .and_then(|o| o.get("jsonnet-language-server")) + }) { + jsonnet_settings.clone() + } else { + params.settings + }; + + let old_config = self.config.read().clone(); + let updated_config = { + let mut config = self.config.write(); + if config.update_from_settings(settings) { + Some(config.clone()) + } else { + None + } + }; + + if let Some(updated_config) = updated_config { + let import_resolution_changed = old_config.jpath != updated_config.jpath + || old_config.resolve_paths_with_tanka != updated_config.resolve_paths_with_tanka; + let 
runtime_config_changed = import_resolution_changed + || old_config.enable_eval_diagnostics != updated_config.enable_eval_diagnostics; + let diagnostics_config_changed = + old_config.enable_lint_diagnostics != updated_config.enable_lint_diagnostics; + let inlay_hints_config_changed = old_config.inlay_hints != updated_config.inlay_hints; + + if runtime_config_changed { + self.reconfigure_runtime_components(&updated_config); + debug!("Runtime components reconfigured after settings update"); + } + + if import_resolution_changed { + // Import resolution and cached file types depend on jpath/tanka settings. + self.type_cache.write().clear(); + self.mark_workspace_known_files_dirty(); + let tracked_files = { + let graph = self.import_graph.read(); + graph.all_files().collect::>() + }; + self.mark_workspace_files_dirty(tracked_files); + + for file in self.documents.open_files() { + self.update_import_graph(file); + self.mark_workspace_file_clean(file); + } + } + + if runtime_config_changed || diagnostics_config_changed { + for file in self.documents.open_files() { + self.schedule_diagnostics_file(file); + } + } + + if inlay_hints_config_changed { + self.request_inlay_hint_refresh_if_supported(); + } + + info!( + "Configuration updated: jpath={:?}, eval_diagnostics={}, tanka_mode={}", + updated_config.jpath, + updated_config.enable_eval_diagnostics, + updated_config.resolve_paths_with_tanka + ); + } + } + + /// Handle workspace/didChangeWatchedFiles notification. + /// + /// This keeps import graph and type cache up to date for files that change on disk + /// while not being open in the editor. 
+ pub(super) fn on_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { + let mut changed_files = Vec::new(); + + for change in params.changes { + let Ok(path) = CanonicalPath::from_uri(&change.uri) else { + continue; + }; + let file = self.documents.intern(&path); + self.register_known_workspace_file(file); + + self.invalidate_type_cache_with_dependents(file); + + match change.typ { + FileChangeType::DELETED => { + let importers = self.import_graph.read().transitive_importers(file); + changed_files.extend(importers.iter().copied()); + self.mark_workspace_files_dirty(importers.iter().copied()); + self.documents.remove_closed(&path); + self.import_graph.write().remove_file(file); + self.remove_known_workspace_file(file); + self.mark_workspace_file_clean(file); + } + FileChangeType::CHANGED | FileChangeType::CREATED => { + if self.documents.is_open(&path) { + self.update_import_graph(file); + self.mark_workspace_file_clean(file); + } else { + self.documents.refresh_closed_from_disk(&path); + self.mark_workspace_file_dirty(file); + } + if change.typ == FileChangeType::CREATED { + self.mark_workspace_files_dirty(self.unresolved_files_for_reindex()); + } + } + _ => {} + } + + changed_files.push(file); + } + + for file in unique_files(changed_files) { + if self.documents.is_open_file(file) { + self.schedule_diagnostics_file(file); + } + self.schedule_diagnostics_for_open_importers(file); + } + } +} diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs new file mode 100644 index 00000000..a64076c4 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -0,0 +1,420 @@ +use lsp_server::RequestId; +use lsp_types::{ + request::{ + CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, + DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, + GotoImplementation, GotoTypeDefinition, HoverRequest, InlayHintRequest, + 
PrepareRenameRequest, RangeFormatting, References, Rename, Request as _, + SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, + WorkspaceSymbolRequest, + }, + ExecuteCommandParams, +}; +use serde::{de::DeserializeOwned, Serialize}; +use tracing::{debug, info, warn}; + +use super::{async_requests::AsyncRequestContext, custom_operations, requests, Server}; +use crate::protocol::{inflight_requests::IncomingRequest, request_error::RequestError}; + +impl Server { + /// Handle an incoming request. + pub(super) fn handle_request(&mut self, req: lsp_server::Request) -> anyhow::Result<()> { + debug!("Handling request: {} (id={})", req.method, req.id); + + let lsp_server::Request { id, method, params } = req; + match method.as_str() { + Shutdown::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_shutdown_request(request) + } + GotoDefinition::METHOD + | GotoDeclaration::METHOD + | GotoTypeDefinition::METHOD + | GotoImplementation::METHOD + | HoverRequest::METHOD + | InlayHintRequest::METHOD + | Completion::METHOD + | References::METHOD + | WorkspaceSymbolRequest::METHOD + | Rename::METHOD + | CodeLensRequest::METHOD + | Formatting::METHOD + | RangeFormatting::METHOD + | DocumentSymbolRequest::METHOD + | DocumentHighlightRequest::METHOD + | CodeActionRequest::METHOD + | SignatureHelpRequest::METHOD + | PrepareRenameRequest::METHOD + | SemanticTokensFullRequest::METHOD + | SemanticTokensRangeRequest::METHOD + | ExecuteCommand::METHOD => self.handle_async_request(id, method.as_str(), params), + CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), + _ => { + if self.handle_custom_operation_request(id.clone(), method.as_str(), params) { + return Ok(()); + } + let request = self.inflight_requests.begin_unknown(id, method.as_str()); + warn!("Unhandled request: {}", request.method()); + let _ = self.inflight_requests.send_unknown_error( + request, + RequestError::MethodNotFound { + method: 
method.to_string(), + }, + )?; + Ok(()) + } + } + } + + pub(super) fn handle_shutdown_request( + &mut self, + request: IncomingRequest, + ) -> anyhow::Result<()> { + info!("Shutdown request received"); + self.shutdown_requested = true; + let _ = self.inflight_requests.send_ok(request, ())?; + Ok(()) + } + + fn handle_sync_request( + &mut self, + id: RequestId, + method: &str, + params: serde_json::Value, + ) -> anyhow::Result<()> { + if method == CodeLensResolve::METHOD { + let request = self.inflight_requests.begin::(id); + return self.handle_sync_typed( + request, + params, + requests::sync_handlers::code_lens_resolve::handle, + ); + } + + let request = self.inflight_requests.begin_unknown(id, method); + warn!("Unhandled request: {}", request.method()); + let _ = self.inflight_requests.send_unknown_error( + request, + RequestError::MethodNotFound { + method: method.to_string(), + }, + )?; + Ok(()) + } + + fn handle_custom_operation_request( + &mut self, + id: RequestId, + method: &str, + params: serde_json::Value, + ) -> bool { + let Some(operation) = custom_operations::operation_for_custom_method(method) else { + return false; + }; + + self.inflight_requests.begin_unknown(id.clone(), method); + let context = self.async_request_context(); + let compute = operation.handle_custom_request; + self.spawn_async_response(id, operation.custom_method, move || { + compute(&context, params) + }); + true + } + + fn handle_sync_typed( + &mut self, + request: IncomingRequest, + params: serde_json::Value, + handler: fn(&Self, &R::Params) -> R::Result, + ) -> anyhow::Result<()> + where + R: lsp_types::request::Request, + R::Params: DeserializeOwned, + R::Result: Serialize, + { + let params: R::Params = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_error( + request, + RequestError::invalid_params(R::METHOD, err.to_string()), + )?; + return Ok(()); + } + }; + + let _ = self + .inflight_requests + 
.send_ok(request, handler(self, ¶ms))?; + Ok(()) + } + + fn spawn_typed_json_response(&self, request: IncomingRequest, compute: F) + where + R: lsp_types::request::Request, + R::Result: Serialize + Send + 'static, + F: FnOnce() -> R::Result + Send + 'static, + { + let id = request.into_id(); + self.spawn_async_response(id, R::METHOD, move || { + serde_json::to_value(compute()).map_err(Into::into) + }); + } + + fn handle_async_request( + &mut self, + id: RequestId, + method: &str, + params: serde_json::Value, + ) -> anyhow::Result<()> { + match method { + GotoDefinition::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::goto_definition::handle, + ) + } + GotoDeclaration::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::goto_declaration::handle, + ) + } + GotoImplementation::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::goto_implementation::handle, + ) + } + GotoTypeDefinition::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::goto_type_definition::handle, + ) + } + HoverRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, requests::async_handlers::hover::handle) + } + InlayHintRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::inlay_hints::handle, + ) + } + Completion::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::completion::handle, + ) + } + References::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + 
params, + requests::async_handlers::references::handle, + ) + } + WorkspaceSymbolRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::workspace_symbol::handle, + ) + } + Rename::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, requests::async_handlers::rename::handle) + } + CodeLensRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::code_lens::handle, + ) + } + Formatting::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::formatting::handle, + ) + } + RangeFormatting::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::range_formatting::handle, + ) + } + DocumentSymbolRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::document_symbol::handle, + ) + } + DocumentHighlightRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::document_highlight::handle, + ) + } + CodeActionRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::code_action::handle, + ) + } + SignatureHelpRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::signature_help::handle, + ) + } + PrepareRenameRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::prepare_rename::handle, + ) + } + 
SemanticTokensFullRequest::METHOD => { + let request = self + .inflight_requests + .begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::semantic_tokens_full::handle, + ) + } + SemanticTokensRangeRequest::METHOD => { + let request = self + .inflight_requests + .begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::semantic_tokens_range::handle, + ) + } + ExecuteCommand::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_execute_command(request, params) + } + _ => { + let request = self.inflight_requests.begin_unknown(id, method); + warn!("Unhandled request: {}", request.method()); + let _ = self.inflight_requests.send_unknown_error( + request, + RequestError::MethodNotFound { + method: method.to_string(), + }, + )?; + Ok(()) + } + } + } + + fn handle_async_typed( + &mut self, + request: IncomingRequest, + params: serde_json::Value, + handler: fn(&AsyncRequestContext, &R::Params) -> R::Result, + ) -> anyhow::Result<()> + where + R: lsp_types::request::Request, + R::Params: DeserializeOwned + Send + 'static, + R::Result: Serialize + Send + 'static, + { + let params: R::Params = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_error( + request, + RequestError::invalid_params(R::METHOD, err.to_string()), + )?; + return Ok(()); + } + }; + let context = self.async_request_context(); + self.spawn_typed_json_response(request, move || handler(&context, ¶ms)); + Ok(()) + } + + fn handle_async_execute_command( + &mut self, + request: IncomingRequest, + params: serde_json::Value, + ) -> anyhow::Result<()> { + let params: ExecuteCommandParams = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_error( + request, + RequestError::invalid_params(ExecuteCommand::METHOD, err.to_string()), + )?; + return Ok(()); + } + }; + if 
!Self::is_supported_execute_command(¶ms.command) { + let _ = self.inflight_requests.send_error( + request, + RequestError::UnknownExecuteCommand { + command: params.command, + }, + )?; + return Ok(()); + } + + if let Some(operation) = custom_operations::operation_for_execute_command(¶ms.command) { + let context = self.async_request_context(); + let args = params.arguments.clone(); + let Some(compute) = operation.handle_execute_command else { + let _ = self.inflight_requests.send_error( + request, + RequestError::MissingExecuteHandler { + command: params.command.clone(), + }, + )?; + return Ok(()); + }; + self.spawn_typed_json_response(request, move || compute(&context, &args)); + return Ok(()); + } + + let _ = self.inflight_requests.send_error( + request, + RequestError::UnknownExecuteCommand { + command: params.command, + }, + )?; + Ok(()) + } +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_action.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_action.rs new file mode 100644 index 00000000..f967b5ac --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_action.rs @@ -0,0 +1,10 @@ +use lsp_types::{CodeActionParams, CodeActionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &CodeActionParams, +) -> Option { + context.code_action(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_lens.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_lens.rs new file mode 100644 index 00000000..99848c7e --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_lens.rs @@ -0,0 +1,10 @@ +use lsp_types::{CodeLens, CodeLensParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &CodeLensParams, +) -> Option> { + context.code_lens(params) +} diff --git 
a/crates/jrsonnet-lsp/src/server/requests/async_handlers/completion.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/completion.rs new file mode 100644 index 00000000..40db9230 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/completion.rs @@ -0,0 +1,10 @@ +use lsp_types::{CompletionParams, CompletionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &CompletionParams, +) -> Option { + context.completion(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_highlight.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_highlight.rs new file mode 100644 index 00000000..b93f11f8 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_highlight.rs @@ -0,0 +1,10 @@ +use lsp_types::{DocumentHighlight, DocumentHighlightParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &DocumentHighlightParams, +) -> Option> { + context.document_highlight(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_symbol.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_symbol.rs new file mode 100644 index 00000000..1bb0b8e0 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_symbol.rs @@ -0,0 +1,10 @@ +use lsp_types::{DocumentSymbolParams, DocumentSymbolResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &DocumentSymbolParams, +) -> Option { + context.document_symbol(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/formatting.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/formatting.rs new file mode 100644 index 00000000..5e3832c8 --- /dev/null +++ 
b/crates/jrsonnet-lsp/src/server/requests/async_handlers/formatting.rs @@ -0,0 +1,10 @@ +use lsp_types::{DocumentFormattingParams, TextEdit}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &DocumentFormattingParams, +) -> Option> { + context.formatting(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_declaration.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_declaration.rs new file mode 100644 index 00000000..dc777900 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_declaration.rs @@ -0,0 +1,10 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &GotoDefinitionParams, +) -> Option { + context.goto_declaration(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_definition.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_definition.rs new file mode 100644 index 00000000..9f75d14d --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_definition.rs @@ -0,0 +1,10 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &GotoDefinitionParams, +) -> Option { + context.goto_definition(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_implementation.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_implementation.rs new file mode 100644 index 00000000..d605f538 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_implementation.rs @@ -0,0 +1,10 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( 
+ context: &AsyncRequestContext, + params: &GotoDefinitionParams, +) -> Option { + context.goto_implementation(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_type_definition.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_type_definition.rs new file mode 100644 index 00000000..d3a2a82f --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_type_definition.rs @@ -0,0 +1,10 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &GotoDefinitionParams, +) -> Option { + context.goto_type_definition(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/hover.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/hover.rs new file mode 100644 index 00000000..459ac12f --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/hover.rs @@ -0,0 +1,7 @@ +use lsp_types::{Hover, HoverParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle(context: &AsyncRequestContext, params: &HoverParams) -> Option { + context.hover(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/inlay_hints.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/inlay_hints.rs new file mode 100644 index 00000000..6fbcbcdc --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/inlay_hints.rs @@ -0,0 +1,10 @@ +use lsp_types::{InlayHint, InlayHintParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &InlayHintParams, +) -> Option> { + context.inlay_hints(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs new file mode 100644 index 00000000..fa80892f --- /dev/null +++ 
b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs @@ -0,0 +1,20 @@ +pub(crate) mod code_action; +pub(crate) mod code_lens; +pub(crate) mod completion; +pub(crate) mod document_highlight; +pub(crate) mod document_symbol; +pub(crate) mod formatting; +pub(crate) mod goto_declaration; +pub(crate) mod goto_definition; +pub(crate) mod goto_implementation; +pub(crate) mod goto_type_definition; +pub(crate) mod hover; +pub(crate) mod inlay_hints; +pub(crate) mod prepare_rename; +pub(crate) mod range_formatting; +pub(crate) mod references; +pub(crate) mod rename; +pub(crate) mod semantic_tokens_full; +pub(crate) mod semantic_tokens_range; +pub(crate) mod signature_help; +pub(crate) mod workspace_symbol; diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/prepare_rename.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/prepare_rename.rs new file mode 100644 index 00000000..92642917 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/prepare_rename.rs @@ -0,0 +1,10 @@ +use lsp_types::{PrepareRenameResponse, TextDocumentPositionParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &TextDocumentPositionParams, +) -> Option { + context.prepare_rename(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/range_formatting.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/range_formatting.rs new file mode 100644 index 00000000..6167db71 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/range_formatting.rs @@ -0,0 +1,10 @@ +use lsp_types::{DocumentRangeFormattingParams, TextEdit}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &DocumentRangeFormattingParams, +) -> Option> { + context.formatting_range(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/references.rs 
b/crates/jrsonnet-lsp/src/server/requests/async_handlers/references.rs new file mode 100644 index 00000000..f31b1f74 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/references.rs @@ -0,0 +1,10 @@ +use lsp_types::{Location, ReferenceParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &ReferenceParams, +) -> Option> { + context.references(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/rename.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/rename.rs new file mode 100644 index 00000000..400ee228 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/rename.rs @@ -0,0 +1,10 @@ +use lsp_types::{RenameParams, WorkspaceEdit}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &RenameParams, +) -> Option { + context.rename(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_full.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_full.rs new file mode 100644 index 00000000..5f7608b7 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_full.rs @@ -0,0 +1,10 @@ +use lsp_types::{SemanticTokensParams, SemanticTokensResult}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &SemanticTokensParams, +) -> Option { + context.semantic_tokens_full(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_range.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_range.rs new file mode 100644 index 00000000..800cdd00 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_range.rs @@ -0,0 +1,10 @@ +use lsp_types::{SemanticTokensRangeParams, SemanticTokensRangeResult}; 
+ +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &SemanticTokensRangeParams, +) -> Option { + context.semantic_tokens_range(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/signature_help.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/signature_help.rs new file mode 100644 index 00000000..70b8a200 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/signature_help.rs @@ -0,0 +1,10 @@ +use lsp_types::{SignatureHelp, SignatureHelpParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &SignatureHelpParams, +) -> Option { + context.signature_help(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/workspace_symbol.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/workspace_symbol.rs new file mode 100644 index 00000000..cb552354 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/workspace_symbol.rs @@ -0,0 +1,10 @@ +use lsp_types::{WorkspaceSymbolParams, WorkspaceSymbolResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &WorkspaceSymbolParams, +) -> Option { + context.workspace_symbol(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/mod.rs b/crates/jrsonnet-lsp/src/server/requests/mod.rs new file mode 100644 index 00000000..77c1357e --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/mod.rs @@ -0,0 +1,7 @@ +//! Request handler routing modules. +//! +//! Async policy: request handlers should default to async execution unless +//! they are cheap, local lookups with predictable low latency. 
+ +pub(crate) mod async_handlers; +pub(crate) mod sync_handlers; diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_lens_resolve.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_lens_resolve.rs new file mode 100644 index 00000000..14e2ed69 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_lens_resolve.rs @@ -0,0 +1,8 @@ +use jrsonnet_lsp_handlers as handlers; +use lsp_types::CodeLens; + +use crate::server::Server; + +pub(crate) fn handle(_server: &Server, params: &CodeLens) -> CodeLens { + handlers::resolve_code_lens(params.clone()) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs new file mode 100644 index 00000000..a1ab4419 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs @@ -0,0 +1,9 @@ +//! Request handlers intentionally kept synchronous. +//! +//! These handlers are intentionally tiny and latency-critical, so they stay +//! on the synchronous path. +//! +//! If a handler here grows into heavier work, promote it to +//! `requests::async_handlers`. 
+ +pub(crate) mod code_lens_resolve; diff --git a/crates/jrsonnet-lsp/src/server/watched_files.rs b/crates/jrsonnet-lsp/src/server/watched_files.rs new file mode 100644 index 00000000..55a8f135 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/watched_files.rs @@ -0,0 +1,135 @@ +use lsp_types::{ + notification::{DidChangeWatchedFiles, Notification as _}, + request::RegisterCapability, + DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, NumberOrString, + Registration, RegistrationParams, RelativePattern, +}; + +use super::*; + +impl Server { + pub(super) fn is_supported_execute_command(command: &str) -> bool { + super::custom_operations::operation_for_execute_command(command).is_some() + } + + pub(super) fn supports_dynamic_watched_files_registration(params: &InitializeParams) -> bool { + params + .capabilities + .workspace + .as_ref() + .and_then(|workspace| workspace.did_change_watched_files) + .and_then(|capabilities| capabilities.dynamic_registration) + .unwrap_or(false) + } + + pub(super) fn supports_relative_watch_patterns(params: &InitializeParams) -> bool { + params + .capabilities + .workspace + .as_ref() + .and_then(|workspace| workspace.did_change_watched_files) + .and_then(|capabilities| capabilities.relative_pattern_support) + .unwrap_or(false) + } + + pub(super) fn watched_file_base_uris(init_roots: &InitializeRoots) -> Vec { + let mut uris = Vec::new(); + + if let Some(workspace_folders) = &init_roots.workspace_folders { + for folder in workspace_folders { + uris.push(folder.uri.clone()); + } + } + + if let Some(root_uri) = &init_roots.root_uri { + uris.push(root_uri.clone()); + } + + if let Some(root_path) = &init_roots.root_path { + let root_path = PathBuf::from(root_path); + if let Ok(path) = CanonicalPath::try_from_path(&root_path) { + if let Ok(uri) = path.to_uri() { + uris.push(uri); + } + } + } + + uris.sort_by(|lhs, rhs| lhs.as_str().cmp(rhs.as_str())); + uris.dedup_by(|lhs, rhs| lhs.as_str() == rhs.as_str()); + uris + 
} + + pub(super) fn watched_file_watchers( + initialize_params: &InitializeParams, + init_roots: &InitializeRoots, + ) -> Vec { + if !Self::supports_relative_watch_patterns(initialize_params) { + return WATCHED_FILE_GLOB_PATTERNS + .into_iter() + .map(|pattern| FileSystemWatcher { + glob_pattern: GlobPattern::String(pattern.to_owned()), + kind: None, + }) + .collect(); + } + + let base_uris = Self::watched_file_base_uris(init_roots); + if base_uris.is_empty() { + return WATCHED_FILE_GLOB_PATTERNS + .into_iter() + .map(|pattern| FileSystemWatcher { + glob_pattern: GlobPattern::String(pattern.to_owned()), + kind: None, + }) + .collect(); + } + + base_uris + .into_iter() + .flat_map(|base_uri| { + WATCHED_FILE_GLOB_PATTERNS + .into_iter() + .map(move |pattern| FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(base_uri.clone()), + pattern: pattern.to_owned(), + }), + kind: None, + }) + }) + .collect() + } + + pub(super) fn register_did_change_watched_files( + &mut self, + initialize_params: &InitializeParams, + init_roots: &InitializeRoots, + ) -> Result<()> { + if !Self::supports_dynamic_watched_files_registration(initialize_params) { + return Ok(()); + } + + let watchers = Self::watched_file_watchers(initialize_params, init_roots); + let options = DidChangeWatchedFilesRegistrationOptions { watchers }; + let registration = Registration { + id: "jrsonnet-lsp.did-change-watched-files".to_owned(), + method: DidChangeWatchedFiles::METHOD.to_owned(), + register_options: Some(serde_json::to_value(options)?), + }; + let params = RegistrationParams { + registrations: vec![registration], + }; + + self.inflight_requests + .send_outgoing_request::(params)?; + info!("Requested dynamic file-watch registration"); + Ok(()) + } + + pub(super) fn request_id_from_number_or_string(id: NumberOrString) -> RequestId { + match id { + NumberOrString::Number(id) => id.into(), + NumberOrString::String(id) => id.into(), + } + } +} diff --git 
a/crates/jrsonnet-lsp/src/server/workspace_index.rs b/crates/jrsonnet-lsp/src/server/workspace_index.rs new file mode 100644 index 00000000..e2d3ff2b --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/workspace_index.rs @@ -0,0 +1,154 @@ +use std::path::{Path, PathBuf}; + +use jrsonnet_lsp_document::{CanonicalPath, FileId}; +use rustc_hash::FxHashSet; +use tracing::{debug, warn}; + +use super::{InitializeRoots, Server}; + +pub(crate) fn collect_workspace_files_for_roots(roots: &[PathBuf]) -> Vec { + let mut files = Vec::new(); + let mut seen_paths: FxHashSet = FxHashSet::default(); + + for root in roots { + collect_workspace_files(root, |path| { + let key = path.as_path().to_path_buf(); + if seen_paths.insert(key) { + files.push(path); + } + true + }); + } + + files.sort_by(|lhs, rhs| lhs.as_path().cmp(rhs.as_path())); + files +} + +impl Server { + pub(super) fn register_known_workspace_file(&self, file: FileId) { + self.workspace_known_files.write().insert(file); + } + + pub(super) fn remove_known_workspace_file(&self, file: FileId) { + self.workspace_known_files.write().remove(&file); + } + + pub(super) fn mark_workspace_file_dirty(&self, file: FileId) { + self.workspace_dirty_files.write().insert(file); + } + + pub(super) fn mark_workspace_files_dirty(&self, files: impl IntoIterator) { + self.workspace_dirty_files.write().extend(files); + } + + pub(super) fn mark_workspace_file_clean(&self, file: FileId) { + self.workspace_dirty_files.write().remove(&file); + } + + pub(super) fn mark_workspace_known_files_dirty(&self) { + let known = self + .workspace_known_files + .read() + .iter() + .copied() + .collect::>(); + self.mark_workspace_files_dirty(known); + debug!("Marked all known workspace files dirty"); + } + + pub(super) fn workspace_root_paths(init_roots: &InitializeRoots) -> Vec { + let mut roots = Vec::new(); + + if let Some(workspace_folders) = &init_roots.workspace_folders { + for folder in workspace_folders { + if let Ok(path) = 
CanonicalPath::from_uri(&folder.uri) { + roots.push(path.as_path().to_path_buf()); + } + } + } + + if let Some(root_uri) = &init_roots.root_uri { + if let Ok(path) = CanonicalPath::from_uri(root_uri) { + roots.push(path.as_path().to_path_buf()); + } + } + + if let Some(root_path) = &init_roots.root_path { + roots.push(PathBuf::from(root_path)); + } + + let mut normalized_roots = roots + .into_iter() + .filter_map(Self::normalize_workspace_root) + .collect::>(); + normalized_roots.sort(); + normalized_roots.dedup(); + normalized_roots + } + + fn normalize_workspace_root(path: PathBuf) -> Option { + let path = path.canonicalize().unwrap_or(path); + if path.is_dir() { + return Some(path); + } + if path.is_file() { + return path.parent().map(Path::to_path_buf); + } + None + } +} + +fn collect_workspace_files(root: &Path, mut on_file: F) +where + F: FnMut(CanonicalPath) -> bool, +{ + let mut to_visit = vec![root.to_path_buf()]; + + while let Some(dir) = to_visit.pop() { + let Ok(entries) = std::fs::read_dir(&dir) else { + continue; + }; + + for entry in entries.flatten() { + let path = entry.path(); + let Ok(file_type) = entry.file_type() else { + continue; + }; + + if file_type.is_dir() { + if should_skip_workspace_dir(&path) { + continue; + } + to_visit.push(path); + continue; + } + + if !file_type.is_file() || !is_indexed_workspace_file(&path) { + continue; + } + + match CanonicalPath::try_from_path(&path) { + Ok(path) => { + if !on_file(path) { + return; + } + } + Err(err) => warn!("Skipping workspace file {}: {err}", path.to_string_lossy()), + } + } + } +} + +fn should_skip_workspace_dir(path: &Path) -> bool { + let Some(name) = path.file_name().and_then(|name| name.to_str()) else { + return false; + }; + matches!(name, ".git" | ".jj" | ".svn" | "node_modules" | "target") +} + +fn is_indexed_workspace_file(path: &Path) -> bool { + let Some(extension) = path.extension().and_then(|extension| extension.to_str()) else { + return false; + }; + matches!(extension, 
"jsonnet" | "libsonnet" | "json") +} diff --git a/crates/jrsonnet-lsp/tests/cross_file_tests.rs b/crates/jrsonnet-lsp/tests/cross_file_tests.rs new file mode 100644 index 00000000..aa43fd99 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/cross_file_tests.rs @@ -0,0 +1,738 @@ +//! Cross-file analysis tests. +//! +//! Tests import graph operations, type caching across files, and cache invalidation. +//! These tests validate that the global `TyStore` work enables proper cross-file type sharing. + +use std::{ + fs, + path::{Path, PathBuf}, + sync::Arc, +}; + +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, FileId, PathStore}; +use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_inference::{analyze_and_cache, new_shared_cache, TypeAnalysis, TypeCache}; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, Ty}; +use tempfile::TempDir; + +/// Helper to create a test file in the temp directory. +fn write_file(dir: &TempDir, name: &str, content: &str) -> PathBuf { + let path = dir.path().join(name); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).expect("expected success"); + } + fs::write(&path, content).expect("expected success"); + path +} + +/// Helper to get canonical path from a temp file. +fn canonical_path(path: &PathBuf) -> CanonicalPath { + CanonicalPath::new(path.canonicalize().expect("expected success")) +} + +/// Create a resolver function for the given base directory. 
+fn make_resolver(base_dir: &Path) -> impl Fn(&str) -> Option + '_ { + move |import_path: &str| { + let resolved = if Path::new(import_path).is_absolute() { + PathBuf::from(import_path) + } else { + base_dir.join(import_path) + }; + resolved.canonicalize().ok().map(CanonicalPath::new) + } +} + +fn cache_update(cache: &mut TypeCache, file: FileId, ty: GlobalTy, version: i32) { + cache.update(file, ty, version); +} + +fn cache_get(cache: &TypeCache, file: FileId) -> Option { + cache.get(file) +} + +fn cache_is_up_to_date(cache: &TypeCache, file: FileId, version: i32) -> bool { + cache.is_up_to_date(file, version) +} + +fn cache_invalidate(cache: &mut TypeCache, file: FileId) { + cache.invalidate(file); +} + +fn cache_invalidate_many(cache: &mut TypeCache, files: impl IntoIterator) { + cache.invalidate_many(files); +} + +mod import_graph_tests { + use super::*; + + #[test] + fn test_deep_import_chain() { + // Create a chain: file1 -> file2 -> file3 -> file4 -> file5 -> file6 + let tmp = TempDir::new().expect("expected success"); + let base_dir = tmp.path(); + + let file6 = write_file(&tmp, "file6.jsonnet", "{ value: 6 }"); + let file5 = write_file( + &tmp, + "file5.jsonnet", + "local f6 = import 'file6.jsonnet'; { value: 5, next: f6 }", + ); + let file4 = write_file( + &tmp, + "file4.jsonnet", + "local f5 = import 'file5.jsonnet'; { value: 4, next: f5 }", + ); + let file3 = write_file( + &tmp, + "file3.jsonnet", + "local f4 = import 'file4.jsonnet'; { value: 3, next: f4 }", + ); + let file2 = write_file( + &tmp, + "file2.jsonnet", + "local f3 = import 'file3.jsonnet'; { value: 2, next: f3 }", + ); + let file1 = write_file( + &tmp, + "file1.jsonnet", + "local f2 = import 'file2.jsonnet'; { value: 1, next: f2 }", + ); + + let mut graph = ImportGraph::new(PathStore::new()); + let resolver = make_resolver(base_dir); + + // Parse all files + for file in [&file1, &file2, &file3, &file4, &file5, &file6] { + let content = fs::read_to_string(file).expect("expected success"); + 
let doc = Document::new(content, DocVersion::new(1)); + let path = canonical_path(file); + graph.update_file(graph.intern(&path), &doc, &resolver); + } + + // file1 should have direct imports: file2 + let path1 = canonical_path(&file1); + let imports1 = graph.imports(graph.intern(&path1)); + assert_eq!(imports1.len(), 1, "file1 should import exactly one file"); + + // file6 should have no imports + let path6 = canonical_path(&file6); + let imports6 = graph.imports(graph.intern(&path6)); + assert!(imports6.is_empty(), "file6 should have no imports"); + + // Transitive importers of file6 should include file1-5 + let file6 = graph.file(&path6).expect("file6 should be interned"); + let importers = graph.transitive_importers(file6); + assert!( + importers.len() >= 5, + "file6 should have at least 5 transitive importers" + ); + } + + #[test] + fn test_diamond_dependency() { + // Create a diamond: A imports B and C, both B and C import D + // A + // / \ + // B C + // \ / + // D + let tmp = TempDir::new().expect("expected success"); + let base_dir = tmp.path(); + + let file_d = write_file(&tmp, "d.jsonnet", "{ shared: 'value' }"); + let file_b = write_file( + &tmp, + "b.jsonnet", + "local d = import 'd.jsonnet'; { b_field: d.shared }", + ); + let file_c = write_file( + &tmp, + "c.jsonnet", + "local d = import 'd.jsonnet'; { c_field: d.shared }", + ); + let file_a = write_file( + &tmp, + "a.jsonnet", + r" + local b = import 'b.jsonnet'; + local c = import 'c.jsonnet'; + { a_field: b.b_field + c.c_field } + ", + ); + + let mut graph = ImportGraph::new(PathStore::new()); + let resolver = make_resolver(base_dir); + + for file in [&file_d, &file_b, &file_c, &file_a] { + let content = fs::read_to_string(file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + let path = canonical_path(file); + graph.update_file(graph.intern(&path), &doc, &resolver); + } + + // A should import B and C + let path_a = canonical_path(&file_a); + let imports_a = 
graph.imports(graph.intern(&path_a)); + assert_eq!( + imports_a.len(), + 2, + "A should import exactly 2 files (B and C)" + ); + + // D should be imported by both B and C + let path_d = canonical_path(&file_d); + let file_d = graph.file(&path_d).expect("d should be interned"); + let importers_d = graph.transitive_importers(file_d); + // D's transitive importers: B, C, A + assert_eq!( + importers_d.len(), + 3, + "D should have 3 transitive importers (B, C, A)" + ); + } + + #[test] + fn test_import_graph_removal() { + let tmp = TempDir::new().expect("expected success"); + let base_dir = tmp.path(); + + let lib = write_file(&tmp, "lib.jsonnet", "{ helper: 42 }"); + let main = write_file( + &tmp, + "main.jsonnet", + "local lib = import 'lib.jsonnet'; lib.helper", + ); + + let mut graph = ImportGraph::new(PathStore::new()); + let resolver = make_resolver(base_dir); + + // Add both files + let lib_content = fs::read_to_string(&lib).expect("expected success"); + let lib_doc = Document::new(lib_content, DocVersion::new(1)); + let lib_path = canonical_path(&lib); + graph.update_file(graph.intern(&lib_path), &lib_doc, &resolver); + + let main_content = fs::read_to_string(&main).expect("expected success"); + let main_doc = Document::new(main_content, DocVersion::new(1)); + let main_path = canonical_path(&main); + graph.update_file(graph.intern(&main_path), &main_doc, &resolver); + + // Verify import relationship + assert_eq!(graph.imports(graph.intern(&main_path)).len(), 1); + + // Remove lib + graph.remove_file(graph.intern(&lib_path)); + + // Graph should handle missing targets gracefully + let imports = graph.imports(graph.intern(&main_path)); + // main still imports lib (by path), even if lib is removed from graph + assert_eq!(imports.len(), 1); + } + + #[test] + fn test_multiple_imports_same_file() { + // Test that importing the same file from multiple locations is tracked correctly + let tmp = TempDir::new().expect("expected success"); + let base_dir = tmp.path(); + + 
let shared = write_file(&tmp, "shared.jsonnet", "{ x: 1 }"); + let user1 = write_file( + &tmp, + "user1.jsonnet", + "local s = import 'shared.jsonnet'; s.x", + ); + let user2 = write_file( + &tmp, + "user2.jsonnet", + "local s = import 'shared.jsonnet'; s.x + 1", + ); + let user3 = write_file( + &tmp, + "user3.jsonnet", + "local s = import 'shared.jsonnet'; s.x * 2", + ); + + let mut graph = ImportGraph::new(PathStore::new()); + let resolver = make_resolver(base_dir); + + for file in [&shared, &user1, &user2, &user3] { + let content = fs::read_to_string(file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + let path = canonical_path(file); + graph.update_file(graph.intern(&path), &doc, &resolver); + } + + // shared should be imported by user1, user2, user3 + let shared_path = canonical_path(&shared); + let shared = graph.file(&shared_path).expect("shared should be interned"); + let importers = graph.transitive_importers(shared); + assert_eq!(importers.len(), 3, "shared should have 3 importers"); + } +} + +mod type_cache_tests { + use super::*; + + #[test] + fn test_cache_basic_types() { + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file1 = write_file(&tmp, "number.jsonnet", "42"); + let file2 = write_file(&tmp, "string.jsonnet", "\"hello\""); + let file3 = write_file(&tmp, "bool.jsonnet", "true"); + + let path1 = canonical_path(&file1); + let path2 = canonical_path(&file2); + let path3 = canonical_path(&file3); + let file1_id = cache.intern(&path1); + let file2_id = cache.intern(&path2); + let file3_id = cache.intern(&path3); + + // Cache different types + cache_update(&mut cache, file1_id, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2_id, GlobalTy::STRING, 1); + cache_update(&mut cache, file3_id, GlobalTy::BOOL, 1); + + // Retrieve and verify + assert_eq!(cache_get(&cache, file1_id), 
Some(GlobalTy::NUMBER)); + assert_eq!(cache_get(&cache, file2_id), Some(GlobalTy::STRING)); + assert_eq!(cache_get(&cache, file3_id), Some(GlobalTy::BOOL)); + } + + #[test] + fn test_cache_version_tracking() { + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "test.jsonnet", "1"); + let path = canonical_path(&file); + let file_id = cache.intern(&path); + + // Initial version + cache_update(&mut cache, file_id, GlobalTy::NUMBER, 1); + assert!(cache_is_up_to_date(&cache, file_id, 1)); + assert!(!cache_is_up_to_date(&cache, file_id, 2)); + + // Update version + cache_update(&mut cache, file_id, GlobalTy::STRING, 2); + assert!(!cache_is_up_to_date(&cache, file_id, 1)); + assert!(cache_is_up_to_date(&cache, file_id, 2)); + } + + #[test] + fn test_cache_invalidation() { + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file1 = write_file(&tmp, "a.jsonnet", "1"); + let file2 = write_file(&tmp, "b.jsonnet", "2"); + let file3 = write_file(&tmp, "c.jsonnet", "3"); + + let path1 = canonical_path(&file1); + let path2 = canonical_path(&file2); + let path3 = canonical_path(&file3); + let file1_id = cache.intern(&path1); + let file2_id = cache.intern(&path2); + let file3_id = cache.intern(&path3); + + cache_update(&mut cache, file1_id, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2_id, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file3_id, GlobalTy::NUMBER, 1); + + assert_eq!(cache.len(), 3); + + // Invalidate one + cache_invalidate(&mut cache, file2_id); + assert_eq!(cache.len(), 2); + cache_get(&cache, file1_id).expect("path1 should still be cached"); + assert_eq!(cache_get(&cache, file2_id), None); + cache_get(&cache, file3_id).expect("path3 should still be cached"); + } + + 
#[test] + fn test_cache_invalidate_many() { + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let files: Vec<_> = (0..10) + .map(|i| { + let file = write_file(&tmp, &format!("file{i}.jsonnet"), &format!("{i}")); + canonical_path(&file) + }) + .collect(); + let file_ids: Vec<_> = files.iter().map(|path| cache.intern(path)).collect(); + + // Cache all + for file in &file_ids { + cache_update(&mut cache, *file, GlobalTy::NUMBER, 1); + } + assert_eq!(cache.len(), 10); + + // Invalidate half + let to_invalidate: Vec<_> = file_ids.iter().step_by(2).copied().collect(); + cache_invalidate_many(&mut cache, to_invalidate); + + // Should have 5 remaining + assert_eq!(cache.len(), 5); + } +} + +mod cross_file_type_tests { + use super::*; + + #[test] + fn test_analyze_and_cache_basic() { + let global = Arc::new(GlobalTyStore::new()); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "number.jsonnet", "42"); + let path = canonical_path(&file); + + let content = fs::read_to_string(&file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + + // Analyze and cache + let ty = analyze_and_cache(&path, &doc, &cache); + assert_eq!(ty, Ty::NUMBER); + + // Should be cached + let cached = { + let read_cache = cache.read(); + let file = read_cache.file(&path).expect("path should be interned"); + read_cache.get(file) + }; + assert_eq!(cached, Some(GlobalTy::NUMBER)); + } + + #[test] + fn test_analyze_and_cache_different_types() { + let global = Arc::new(GlobalTyStore::new()); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + // Test various Jsonnet literal types + let test_cases = [ + ("number.jsonnet", "42", Ty::NUMBER), + ("string.jsonnet", 
"\"hello\"", Ty::STRING), + ("bool.jsonnet", "true", Ty::TRUE), // Literal boolean type + ("null.jsonnet", "null", Ty::NULL), + ]; + + for (name, content, expected_ty) in test_cases { + let file = write_file(&tmp, name, content); + let path = canonical_path(&file); + let doc = Document::new(content.to_string(), DocVersion::new(1)); + + let ty = analyze_and_cache(&path, &doc, &cache); + assert_eq!(ty, expected_ty, "Type mismatch for {name}"); + } + } + + #[test] + fn test_cache_hit_on_same_version() { + let global = Arc::new(GlobalTyStore::new()); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "test.jsonnet", "42"); + let path = canonical_path(&file); + let doc = Document::new("42".to_string(), DocVersion::new(1)); + + // First analysis + let ty1 = analyze_and_cache(&path, &doc, &cache); + + // Second analysis with same version should hit cache + let ty2 = analyze_and_cache(&path, &doc, &cache); + + assert_eq!(ty1, ty2); + assert_eq!(ty1, Ty::NUMBER); + } + + #[test] + fn test_cache_miss_on_new_version() { + let global = Arc::new(GlobalTyStore::new()); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "test.jsonnet", "42"); + let path = canonical_path(&file); + + // Version 1: number + let doc1 = Document::new("42".to_string(), DocVersion::new(1)); + let ty1 = analyze_and_cache(&path, &doc1, &cache); + assert_eq!(ty1, Ty::NUMBER); + + // Version 2: string - should re-analyze + let doc2 = Document::new("\"hello\"".to_string(), DocVersion::new(2)); + let ty2 = analyze_and_cache(&path, &doc2, &cache); + assert_eq!(ty2, Ty::STRING); + } + + #[test] + fn test_shared_global_store() { + let global = Arc::new(GlobalTyStore::new()); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); + let tmp = TempDir::new().expect("expected success"); + + 
// Analyze multiple files + let file1 = write_file(&tmp, "a.jsonnet", "1"); + let file2 = write_file(&tmp, "b.jsonnet", "2"); + + let path1 = canonical_path(&file1); + let path2 = canonical_path(&file2); + + let doc1 = Document::new("1".to_string(), DocVersion::new(1)); + let doc2 = Document::new("2".to_string(), DocVersion::new(1)); + + let ty1 = analyze_and_cache(&path1, &doc1, &cache); + let ty2 = analyze_and_cache(&path2, &doc2, &cache); + + // Both should be NUMBER + assert_eq!(ty1, Ty::NUMBER); + assert_eq!(ty2, Ty::NUMBER); + + // They should be the same type ID (from global store) + assert_eq!(ty1, ty2); + } +} + +mod transitive_update_tests { + use super::*; + + #[test] + fn test_transitive_invalidation_chain() { + // When a base file changes, all transitive dependents should be invalidated + let tmp = TempDir::new().expect("expected success"); + let base_dir = tmp.path(); + + let base = write_file(&tmp, "base.jsonnet", "{ x: 1 }"); + let mid = write_file(&tmp, "mid.jsonnet", "local b = import 'base.jsonnet'; b"); + let top_file = write_file(&tmp, "top.jsonnet", "local m = import 'mid.jsonnet'; m"); + + let mut graph = ImportGraph::new(PathStore::new()); + let resolver = make_resolver(base_dir); + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + + // Build graph + for file in [&base, &mid, &top_file] { + let content = fs::read_to_string(file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + let path = canonical_path(file); + graph.update_file(graph.intern(&path), &doc, &resolver); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::ANY, 1); // Placeholder type + } + + let base_path = canonical_path(&base); + let mid_path = canonical_path(&mid); + let top_path = canonical_path(&top_file); + let base_file = cache.intern(&base_path); + let mid_file = cache.intern(&mid_path); + let top_file = cache.intern(&top_path); + + // All 
should be cached + cache_get(&cache, base_file).expect("base should be cached"); + cache_get(&cache, mid_file).expect("mid should be cached"); + cache_get(&cache, top_file).expect("top should be cached"); + + // Simulate base file change - need to invalidate transitive importers + let graph_base = graph.file(&base_path).expect("base should be interned"); + let importers = graph.transitive_importers(graph_base); + cache_invalidate(&mut cache, base_file); + cache_invalidate_many(&mut cache, importers); + + // All should be invalidated + assert_eq!(cache_get(&cache, base_file), None); + assert_eq!(cache_get(&cache, mid_file), None); + assert_eq!(cache_get(&cache, top_file), None); + } + + #[test] + fn test_partial_invalidation() { + // When a leaf file changes, only its importers should be affected + let tmp = TempDir::new().expect("expected success"); + let base_dir = tmp.path(); + + let lib1 = write_file(&tmp, "lib1.jsonnet", "{ a: 1 }"); + let lib2 = write_file(&tmp, "lib2.jsonnet", "{ b: 2 }"); + let main = write_file(&tmp, "main.jsonnet", "local l1 = import 'lib1.jsonnet'; l1"); + + let mut graph = ImportGraph::new(PathStore::new()); + let resolver = make_resolver(base_dir); + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + + // Build graph - main imports lib1, not lib2 + for file in [&lib1, &lib2, &main] { + let content = fs::read_to_string(file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + let path = canonical_path(file); + graph.update_file(graph.intern(&path), &doc, &resolver); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::ANY, 1); + } + + let lib1_path = canonical_path(&lib1); + let lib2_path = canonical_path(&lib2); + let main_path = canonical_path(&main); + let lib1_file = cache.intern(&lib1_path); + let lib2_file = cache.intern(&lib2_path); + let main_file = cache.intern(&main_path); + + // Change lib1 - should 
invalidate lib1 and main, but not lib2 + let graph_lib1 = graph.file(&lib1_path).expect("lib1 should be interned"); + let importers = graph.transitive_importers(graph_lib1); + cache_invalidate(&mut cache, lib1_file); + cache_invalidate_many(&mut cache, importers); + + assert_eq!(cache_get(&cache, lib1_file), None); + assert_eq!(cache_get(&cache, main_file), None); + cache_get(&cache, lib2_file).expect("lib2 should be unchanged"); + } + + #[test] + fn test_diamond_invalidation() { + // When D changes in A -> B,C -> D diamond, all should be invalidated + let tmp = TempDir::new().expect("expected success"); + let base_dir = tmp.path(); + + let d = write_file(&tmp, "d.jsonnet", "{ shared: 1 }"); + let b = write_file(&tmp, "b.jsonnet", "local d = import 'd.jsonnet'; d"); + let c = write_file(&tmp, "c.jsonnet", "local d = import 'd.jsonnet'; d"); + let a = write_file( + &tmp, + "a.jsonnet", + r" + local b = import 'b.jsonnet'; + local c = import 'c.jsonnet'; + { b: b, c: c } + ", + ); + + let mut graph = ImportGraph::new(PathStore::new()); + let resolver = make_resolver(base_dir); + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + + for file in [&d, &b, &c, &a] { + let content = fs::read_to_string(file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + let path = canonical_path(file); + graph.update_file(graph.intern(&path), &doc, &resolver); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::ANY, 1); + } + + let d_path = canonical_path(&d); + let d_file = cache.intern(&d_path); + let b_file = cache.intern(&canonical_path(&b)); + let c_file = cache.intern(&canonical_path(&c)); + let a_file = cache.intern(&canonical_path(&a)); + + // Change D + let graph_d = graph.file(&d_path).expect("d should be interned"); + let importers = graph.transitive_importers(graph_d); + cache_invalidate(&mut cache, d_file); + cache_invalidate_many(&mut cache, 
importers); + + // All should be invalidated (D is transitively imported by all) + assert!(cache_get(&cache, d_file).is_none()); + assert!(cache_get(&cache, b_file).is_none()); + assert!(cache_get(&cache, c_file).is_none()); + assert!(cache_get(&cache, a_file).is_none()); + } +} + +mod type_analysis_imports_tests { + use super::*; + + #[test] + fn test_analyze_object_type() { + let global = Arc::new(GlobalTyStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "obj.jsonnet", "{ a: 1, b: 'hello', c: true }"); + let content = fs::read_to_string(&file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + + let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); + let ty = analysis.document_type(); + + // Should be an object type (not primitive) + assert_ne!(ty, Ty::NUMBER); + assert_ne!(ty, Ty::STRING); + assert_ne!(ty, Ty::BOOL); + assert_ne!(ty, Ty::NULL); + } + + #[test] + fn test_analyze_array_type() { + let global = Arc::new(GlobalTyStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "arr.jsonnet", "[1, 2, 3]"); + let content = fs::read_to_string(&file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + + let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); + let ty = analysis.document_type(); + + // Should be an array type (not primitive) + assert_ne!(ty, Ty::NUMBER); + assert_ne!(ty, Ty::STRING); + } + + #[test] + fn test_analyze_function_type() { + let global = Arc::new(GlobalTyStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "func.jsonnet", "function(x) x + 1"); + let content = fs::read_to_string(&file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + + let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); + let ty = 
analysis.document_type(); + + // Should be a function type (not primitive) + assert_ne!(ty, Ty::NUMBER); + assert_ne!(ty, Ty::STRING); + } + + #[test] + fn test_analyze_local_binding() { + let global = Arc::new(GlobalTyStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "local.jsonnet", "local x = 42; x"); + let content = fs::read_to_string(&file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + + let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); + let ty = analysis.document_type(); + + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_analyze_conditional() { + let global = Arc::new(GlobalTyStore::new()); + let tmp = TempDir::new().expect("expected success"); + + let file = write_file(&tmp, "cond.jsonnet", "if true then 1 else 2"); + let content = fs::read_to_string(&file).expect("expected success"); + let doc = Document::new(content, DocVersion::new(1)); + + let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); + let ty = analysis.document_type(); + + assert_eq!(ty, Ty::NUMBER); + } +} diff --git a/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs b/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs new file mode 100644 index 00000000..2abc89f1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs @@ -0,0 +1,136 @@ +use std::{ + fs, + path::{Path, PathBuf}, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +struct MarkdownCodeBlock { + language: String, + start_line: usize, + code: String, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct JsonnetDocExample { + path: String, + start_line: usize, + code: String, +} + +fn docs_lsp_dir() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../docs/lsp") +} + +fn docs_lsp_markdown_paths() -> Vec { + let mut paths = fs::read_dir(docs_lsp_dir()) + .expect("failed to list docs/lsp") + .filter_map(Result::ok) + .map(|entry| entry.path()) + .filter(|path| 
path.extension().is_some_and(|ext| ext == "md")) + .collect::>(); + paths.sort(); + paths +} + +fn relative_path(path: &Path) -> String { + let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../.."); + let relative = path + .strip_prefix(workspace_root) + .expect("failed to relativize docs path"); + relative.to_string_lossy().into_owned() +} + +fn extract_markdown_fenced_blocks(markdown: &str) -> Vec { + let mut blocks = Vec::new(); + let mut current_fence_len: Option = None; + let mut current_language = String::new(); + let mut current_start_line = 0usize; + let mut current_lines = Vec::new(); + + for (idx, line) in markdown.lines().enumerate() { + let trimmed = line.trim_start(); + let backticks = trimmed.chars().take_while(|&ch| ch == '`').count(); + if backticks >= 3 { + let rest = &trimmed[backticks..]; + + if let Some(fence_len) = current_fence_len { + if backticks >= fence_len && rest.trim().is_empty() { + blocks.push(MarkdownCodeBlock { + language: current_language.clone(), + start_line: current_start_line, + code: current_lines.join("\n"), + }); + current_fence_len = None; + current_language.clear(); + current_start_line = 0; + current_lines.clear(); + continue; + } + } else { + current_fence_len = Some(backticks); + current_language = rest + .split_ascii_whitespace() + .next() + .unwrap_or_default() + .to_ascii_lowercase(); + current_start_line = idx + 2; + current_lines.clear(); + continue; + } + } + + if current_fence_len.is_some() { + current_lines.push(line.to_owned()); + } + } + + blocks +} + +fn collect_jsonnet_doc_examples() -> Vec { + let mut examples = Vec::new(); + for path in docs_lsp_markdown_paths() { + let text = fs::read_to_string(&path).expect("failed to read docs/lsp markdown file"); + let source_path = relative_path(&path); + for block in extract_markdown_fenced_blocks(&text) { + if block.language == "jsonnet" { + examples.push(JsonnetDocExample { + path: source_path.clone(), + start_line: block.start_line, + code: 
block.code, + }); + } + } + } + examples +} + +#[test] +fn lsp_docs_jsonnet_examples_parse_cleanly() { + let examples = collect_jsonnet_doc_examples(); + assert!( + !examples.is_empty(), + "expected at least one ```jsonnet fenced block in docs/lsp" + ); + + let failures = examples + .iter() + .filter_map(|example| { + let (_, errors) = jrsonnet_rowan_parser::parse(&example.code); + if errors.is_empty() { + None + } else { + Some(format!( + "{}:{} failed with parse errors: {errors:?}\n---\n{}\n---", + example.path, example.start_line, example.code + )) + } + }) + .collect::>(); + + assert!( + failures.is_empty(), + "jsonnet examples in docs/lsp must parse cleanly:\n{}", + failures.join("\n\n") + ); +} diff --git a/crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs b/crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs new file mode 100644 index 00000000..4276b2dc --- /dev/null +++ b/crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs @@ -0,0 +1,18 @@ +//! Integration-test entry point for test framework modules. +//! +//! E2E coverage is now driven by YAML scenarios in `tests/scenarios/**`. 
+ +use std::path::PathBuf; + +use jrsonnet_lsp_scenario::{run_yaml_fixture, ScenarioFixtureError}; +use rstest::rstest; + +#[rstest] +fn scenario_yaml_fixture( + #[files("tests/scenarios/**/*.yaml")] fixture: PathBuf, +) -> Result<(), ScenarioFixtureError> { + run_yaml_fixture(&fixture, |connection| { + let server = jrsonnet_lsp::server::Server::new(connection); + let _ = server.run(); + }) +} diff --git a/crates/jrsonnet-lsp/tests/integration/features.rs b/crates/jrsonnet-lsp/tests/integration/features.rs new file mode 100644 index 00000000..559d6cd1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration/features.rs @@ -0,0 +1,1137 @@ +use super::*; + +#[test] +fn test_document_highlight() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/highlight.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(document_highlight_request(2, uri, 0, 13))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Document highlight should succeed" + ); + + let highlights: Option> = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let highlights = highlights.unwrap_or_default(); + assert_eq!(highlights.len(), 3); + assert!( + highlights.iter().any(|highlight| { + highlight.range.start.character == 6 + && highlight.kind == Some(lsp_types::DocumentHighlightKind::WRITE) + }), + "Definition should be highlighted as WRITE" + ); + + client_conn + .sender + 
.send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_inlay_hint() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/inlay.jsonnet"; + let text = "local x = 1; x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(inlay_hint_request(2, uri, 0, 0, 0, 50))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Inlay hint should succeed"); + + let hints: Option> = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let hints = hints.unwrap_or_default(); + let hints_json = serde_json::to_value(&hints).expect("hints should serialize"); + let expected_json = serde_json::json!([{ + "position": { "line": 0, "character": 7 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + }]); + assert_eq!(hints_json, expected_json); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_inlay_hint_config_updates_via_configuration_change() { + let 
(client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/inlay-config.jsonnet"; + let text = "{ local x = 1, z: x, a: 1 }"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(inlay_hint_request(2, uri, 0, 0, 0, 80))) + .expect("expected success"); + let before = recv_response(&client_conn, 2); + assert!(before.error.is_none(), "Inlay hint should succeed"); + let before_hints: Option> = + serde_json::from_value(before.result.expect("should have result")) + .expect("expected success"); + let before_hints = before_hints.unwrap_or_default(); + let before_json = serde_json::to_value(&before_hints).expect("hints should serialize"); + let expected_before = serde_json::json!([{ + "position": { "line": 0, "character": 9 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + }]); + assert_eq!(before_json, expected_before); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "inlayHints": { + "local": "off", + "objectLocal": "off", + "objectMembers": "fields" + } + } + })), + )) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(inlay_hint_request(3, uri, 0, 0, 0, 80))) + .expect("expected success"); + let after = recv_response(&client_conn, 3); + assert!(after.error.is_none(), "Inlay hint should succeed"); + let after_hints: Option> = + serde_json::from_value(after.result.expect("should have result")) + .expect("expected success"); + let after_hints = after_hints.unwrap_or_default(); + let after_json = 
serde_json::to_value(&after_hints).expect("hints should serialize"); + let expected_after = serde_json::json!([ + { + "position": { "line": 0, "character": 16 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + }, + { + "position": { "line": 0, "character": 22 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + } + ]); + assert_eq!(after_json, expected_after); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_action_unused_variable_quickfix() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/code-action.jsonnet"; + let text = "local x = 1; 42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + let diagnostic = unused_variable_diagnostic(); + let actions = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); + assert_eq!( + actions, + Some(expected_unused_variable_quickfix(uri, diagnostic.clone())) + ); + + // Requesting source fix-all actions should return the document-level fix-all action. 
+ let filtered_actions = request_code_actions( + &client_conn, + 3, + uri, + vec![diagnostic.clone()], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!( + filtered_actions, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().expect("expected success"), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_action_policy_updates_via_configuration_change() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/code-action-policy.jsonnet"; + let text = "local x = import \"foo.libsonnet\"; 42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + let diagnostic = unused_variable_diagnostic(); + let actions_before = 
request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); + assert_eq!( + actions_before, + Some(expected_unused_import_binding_actions( + uri, + diagnostic.clone(), + )) + ); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "codeActions": { + "removeUnused": "nonImportBindings" + } + } + })), + )) + .expect("expected success"); + + let actions_after = request_code_actions(&client_conn, 3, uri, vec![diagnostic.clone()], None); + assert_eq!( + actions_after, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().expect("expected success"), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }, + )]) + ); + + let fix_all_after = request_code_actions( + &client_conn, + 4, + uri, + vec![diagnostic], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!(fix_all_after, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .expect("expected success"); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_action_comment_policy_updates_via_configuration_change() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + 
client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/code-action-comment-policy.jsonnet"; + let text = "// heading\nlocal x = 1;\n42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + let diagnostic = lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + }; + + let fix_all_before = request_code_actions( + &client_conn, + 2, + uri, + vec![diagnostic.clone()], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!( + fix_all_before, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().expect("expected success"), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); + + client_conn + .sender + .send(Message::Notification( + 
did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "codeActions": { + "removeUnusedComments": "above" + } + } + })), + )) + .expect("expected success"); + + let fix_all_after = request_code_actions( + &client_conn, + 3, + uri, + vec![diagnostic], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!( + fix_all_after, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + }]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().expect("expected success"), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 1, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_text_document_references() { + let (client_conn, server_conn) = Connection::memory(); + let 
server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/find-refs-command.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(references_request(2, uri, 0, 13, false))) + .expect("expected success"); + let refs_response = recv_response(&client_conn, 2); + assert!( + refs_response.error.is_none(), + "textDocument/references should succeed" + ); + let refs: Option> = + serde_json::from_value(refs_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + refs.unwrap_or_default(), + expected_find_references(uri, false) + ); + + client_conn + .sender + .send(Message::Request(references_request(3, uri, 0, 13, true))) + .expect("expected success"); + let refs_with_declaration_response = recv_response(&client_conn, 3); + assert!( + refs_with_declaration_response.error.is_none(), + "textDocument/references should succeed" + ); + let refs_with_declaration: Option> = serde_json::from_value( + refs_with_declaration_response + .result + .expect("should have result"), + ) + .expect("expected success"); + assert_eq!( + refs_with_declaration.unwrap_or_default(), + expected_find_references(uri, true) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + 
+#[test] +fn test_execute_command_unknown_returns_invalid_params_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.unknownCommand", + vec![], + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("unknown execute command should return an error"); + assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); + assert_matches!( + parse_request_error_data(&error), + RequestErrorData::UnknownExecuteCommand { command } + if command == "jrsonnet.unknownCommand" + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_eval_commands_use_tanka_import_roots() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let env_dir = root.join("environments").join("dev"); + let vendor_dir = root.join("vendor"); + fs::create_dir_all(&env_dir).expect("environment directory should be created"); + fs::create_dir_all(&vendor_dir).expect("vendor directory should be created"); + fs::write(root.join("jsonnetfile.json"), "{}").expect("jsonnetfile should be written"); + + let lib_path = vendor_dir.join("lib.libsonnet"); + let main_path = env_dir.join("main.jsonnet"); + fs::write(&lib_path, "{ answer: 42 }").expect("vendor lib should 
be written"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; lib.answer"#, + ) + .expect("main should be written"); + + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "resolvePathsWithTanka": true + }), + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(custom_eval_file_request(2, &main_uri))) + .expect("expected success"); + let eval_file_custom_response = recv_response(&client_conn, 2); + assert!( + eval_file_custom_response.error.is_none(), + "jrsonnet/evalFile request should succeed" + ); + assert_eq!( + eval_file_custom_response + .result + .expect("jrsonnet/evalFile should return a result"), + serde_json::json!(42) + ); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 3, + "jrsonnet.evalFile", + vec![serde_json::Value::String(main_uri.clone())], + ))) + .expect("expected success"); + let eval_file_bridge_response = recv_response(&client_conn, 3); + assert!( + eval_file_bridge_response.error.is_none(), + "evalFile executeCommand bridge should succeed" + ); + assert_eq!( + eval_file_bridge_response + .result + .expect("evalFile bridge should return a result"), + serde_json::json!(42) + ); + + client_conn + .sender + .send(Message::Request(custom_eval_expression_request( + 4, + r#"(import "lib.libsonnet").answer"#, + Some(&main_uri), + ))) + .expect("expected success"); + let eval_expression_response = recv_response(&client_conn, 4); + assert!( + eval_expression_response.error.is_none(), + "jrsonnet/evalExpression request should succeed" + ); + assert_eq!( + 
eval_expression_response + .result + .expect("jrsonnet/evalExpression should return a result"), + serde_json::json!(42) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .expect("expected success"); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_semantic_tokens_range_request() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/semantic-range.jsonnet"; + let text = "local first = 1\nlocal second = first + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(semantic_tokens_range_request( + 2, uri, 1, 0, 1, 100, + ))) + .expect("expected success"); + + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "semantic tokens range request should succeed" + ); + let tokens: Option = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let tokens = tokens.expect("semantic tokens range should be returned"); + let expected = encode_semantic_tokens(vec![ + semantic_token(1, 0, 5, SemanticTokenTypeName::Keyword, &[]), + semantic_token(1, 6, 6, SemanticTokenTypeName::Variable, &[]), + semantic_token(1, 13, 1, SemanticTokenTypeName::Operator, &[]), + semantic_token(1, 21, 1, SemanticTokenTypeName::Operator, &[]), + semantic_token(1, 
23, 1, SemanticTokenTypeName::Number, &[]), + ]); + assert_eq!(tokens, expected, "semantic tokens range mismatch"); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_lens_resolve_request() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/code-lens-resolve.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(code_lens_request(2, uri))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "code lens request should succeed"); + let lenses: Vec = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + assert!(!lenses.is_empty(), "expected code lenses for test document"); + let evaluate_lens = lenses + .into_iter() + .find(|lens| { + lens.command + .as_ref() + .is_some_and(|command| command.command == "jrsonnet.evalFile") + }) + .expect("expected evaluate code lens"); + + client_conn + .sender + .send(Message::Request(code_lens_resolve_request( + 3, + evaluate_lens.clone(), + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 3); + assert!( + 
response.error.is_none(), + "code lens resolve request should succeed" + ); + let resolved: lsp_types::CodeLens = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + assert_eq!(resolved, evaluate_lens); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_lens_evaluate_command_executes_and_returns_result() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/code-lens-evaluate.jsonnet"; + let text = "local x = 1; x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(code_lens_request(2, uri))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "code lens request should succeed"); + let lenses: Vec = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let evaluate_command = lenses + .into_iter() + .filter_map(|lens| lens.command) + .find(|command| command.command == "jrsonnet.evalFile") + .expect("expected evaluate code lens command"); + assert_eq!(evaluate_command.title, "Evaluate"); + assert_eq!( + evaluate_command.arguments, + 
Some(vec![serde_json::Value::String(uri.to_string())]), + "evaluate lens should target the opened file URI", + ); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 3, + &evaluate_command.command, + evaluate_command.arguments.unwrap_or_default(), + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 3); + assert!( + response.error.is_none(), + "executeCommand for evaluate lens should succeed", + ); + assert_eq!( + response + .result + .expect("evaluate lens execution should return a result"), + serde_json::json!(1), + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cancel_request_returns_request_canceled_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/cancel-code-lens.jsonnet"; + let mut text = String::new(); + for index in 0..20_000 { + writeln!(&mut text, "local value_{index} = {index};") + .expect("writing to String should succeed"); + } + text.push_str("value_19999\n"); + + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, &text))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(code_lens_request(2, uri))) + .expect("expected success"); + client_conn + .sender + .send(Message::Notification(cancel_request_notification(2))) + .expect("expected success"); + + let response = 
recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("cancelled request should return request-canceled error"); + assert_eq!(error.code, lsp_server::ErrorCode::RequestCanceled as i32); + assert_matches!( + parse_request_error_data(&error), + RequestErrorData::RequestCanceled { method } + if method == "textDocument/codeLens" + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(Request::new( + 2.into(), + CodeLensResolve::METHOD.to_string(), + json!({"not": "a code lens"}), + ))) + .expect("expected success"); + + let response = recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("invalid code lens resolve params should return an error"); + assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); + assert_matches!( + parse_request_error_data(&error), + RequestErrorData::InvalidParams { method } if method == "codeLens/resolve" + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected 
success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/crates/jrsonnet-lsp/tests/integration/formatting.rs b/crates/jrsonnet-lsp/tests/integration/formatting.rs new file mode 100644 index 00000000..11eeca5d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration/formatting.rs @@ -0,0 +1,496 @@ +use std::{any::Any, thread::JoinHandle}; + +use rstest::rstest; +use thiserror::Error; + +use super::*; + +type Result<T> = std::result::Result<T, FormatTestError>; + +#[derive(Debug, Error)] +enum FormatTestError { + #[error("failed to send LSP message")] + SendMessage { + #[source] + source: Box<crossbeam_channel::SendError<Message>>, + }, + #[error("formatting request #{request_id} returned error: {error}")] + FormattingRequestFailed { request_id: i32, error: String }, + #[error("formatting request #{request_id} missing result")] + MissingFormattingResult { request_id: i32 }, + #[error("failed to parse formatting response edits")] + FormattingResultParse(#[from] serde_json::Error), + #[error(transparent)] + JoinServerThread(#[from] JoinThreadPanic), + #[error(transparent)] + JoinServerThreadPayload(#[from] JoinThreadPanicParseError), +} + +impl From<crossbeam_channel::SendError<Message>> for FormatTestError { + fn from(source: crossbeam_channel::SendError<Message>) -> Self { + Self::SendMessage { + source: Box::new(source), + } + } +} + +#[derive(Debug, Error)] +#[error("server thread panicked while joining: {message}")] +struct JoinThreadPanic { + message: String, +} + +#[derive(Debug, Error)] +#[error("server thread panicked while joining: non-string panic payload")] +struct JoinThreadPanicParseError; + +impl TryFrom<Box<dyn Any + Send>> for JoinThreadPanic { + type Error = JoinThreadPanicParseError; + + // `JoinHandle::join` returns an opaque panic payload (`Any`), so we downcast common string forms + // to keep panic diagnostics readable. Non-string payloads are reported explicitly.
+ fn try_from(payload: Box<dyn Any + Send>) -> std::result::Result<Self, Self::Error> { + let payload = match payload.downcast::<String>() { + Ok(value) => return Ok(Self { message: *value }), + Err(payload) => payload, + }; + + let message = if let Ok(value) = payload.downcast::<&'static str>() { + (*value).to_string() + } else { + return Err(JoinThreadPanicParseError); + }; + Ok(Self { message }) + } +} + +impl From<Box<dyn Any + Send>> for FormatTestError { + // Bridge `join()`'s panic payload into our test error so `handle.join()?` works at the call site + // while keeping the enum variant typed via `#[from]`. + fn from(value: Box<dyn Any + Send>) -> Self { + match JoinThreadPanic::try_from(value) { + Ok(panic) => panic.into(), + Err(err) => err.into(), + } + } +} + +struct FormatSession { + client_conn: Connection, + server_thread: Option<JoinHandle<()>>, + next_id: i32, +} + +impl FormatSession { + fn send(&self, message: Message) -> Result<()> { + self.client_conn.sender.send(message)?; + Ok(()) + } + + fn start(initialization_options: serde_json::Value) -> Result<Self> { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + let session = Self { + client_conn, + server_thread: Some(server_thread), + next_id: 2, + }; + + session.send(Message::Request(initialize_request_with_options( + 1, + initialization_options, + )))?; + let _ = recv_response(&session.client_conn, 1); + session.send(Message::Notification(initialized_notification()))?; + + Ok(session) + } + + fn open(&self, uri: &str, text: &str) -> Result<()> { + self.send(Message::Notification(did_open_notification(uri, text))) + } + + fn change_configuration(&self, settings: serde_json::Value) -> Result<()> { + self.send(Message::Notification( + did_change_configuration_notification(settings), + )) + } + + fn request_formatting( + &mut self, + uri: &str, + tab_size: u32, + insert_spaces: bool, + ) -> Result<Option<Vec<lsp_types::TextEdit>>> { + let request_id = self.next_id; + self.next_id += 1; + + self.send(Message::Request(formatting_request( + request_id, + uri, + tab_size,
insert_spaces, + )))?; + + let response = recv_response(&self.client_conn, request_id); + if let Some(error) = response.error { + return Err(FormatTestError::FormattingRequestFailed { + request_id, + error: format!("{error:?}"), + }); + } + + let result = response + .result + .ok_or(FormatTestError::MissingFormattingResult { request_id })?; + Ok(serde_json::from_value(result)?) + } + + fn request_formatting_with_options( + &mut self, + uri: &str, + tab_size: u32, + insert_spaces: bool, + trim_trailing_whitespace: Option<bool>, + insert_final_newline: Option<bool>, + trim_final_newlines: Option<bool>, + ) -> Result<Option<Vec<lsp_types::TextEdit>>> { + let request_id = self.next_id; + self.next_id += 1; + + self.send(Message::Request(formatting_request_with_options( + request_id, + uri, + tab_size, + insert_spaces, + trim_trailing_whitespace, + insert_final_newline, + trim_final_newlines, + )))?; + + let response = recv_response(&self.client_conn, request_id); + if let Some(error) = response.error { + return Err(FormatTestError::FormattingRequestFailed { + request_id, + error: format!("{error:?}"), + }); + } + + let result = response + .result + .ok_or(FormatTestError::MissingFormattingResult { request_id })?; + Ok(serde_json::from_value(result)?) + } + + fn request_range_formatting_with_options( + &mut self, + uri: &str, + range: lsp_types::Range, + options: lsp_types::FormattingOptions, + ) -> Result<Option<Vec<lsp_types::TextEdit>>> { + let request_id = self.next_id; + self.next_id += 1; + + self.send(Message::Request(range_formatting_request_with_options( + request_id, uri, range, options, + )))?; + + let response = recv_response(&self.client_conn, request_id); + if let Some(error) = response.error { + return Err(FormatTestError::FormattingRequestFailed { + request_id, + error: format!("{error:?}"), + }); + } + + let result = response + .result + .ok_or(FormatTestError::MissingFormattingResult { request_id })?; + Ok(serde_json::from_value(result)?)
+ } + + fn shutdown(mut self) -> Result<()> { + let request_id = self.next_id; + self.next_id += 1; + + self.send(Message::Request(shutdown_request(request_id)))?; + let _ = recv_response(&self.client_conn, request_id); + self.send(Message::Notification(exit_notification()))?; + + if let Some(handle) = self.server_thread.take() { + handle.join()?; + } + + Ok(()) + } +} + +#[derive(Debug)] +struct SingleFormattingCase { + init_options: serde_json::Value, + uri: &'static str, + text: &'static str, + tab_size: u32, + insert_spaces: bool, + expected: Option<Vec<lsp_types::TextEdit>>, +} + +const SIMPLE_OBJECT: &str = "{a:1}"; + +#[rstest] +#[case(SingleFormattingCase { + init_options: serde_json::Value::Null, + uri: "file:///test/format-default.jsonnet", + text: SIMPLE_OBJECT, + tab_size: 8, + insert_spaces: true, + expected: Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { line: 0, character: 0 }, + end: Position { line: 0, character: 5 }, + }, + new_text: "{\n a: 1,\n}\n".to_string(), + }]), +})] +#[case(SingleFormattingCase { + init_options: serde_json::Value::Null, + uri: "file:///test/format-parse-error.jsonnet", + text: "local x = ", + tab_size: 2, + insert_spaces: true, + expected: None, +})] +#[case(SingleFormattingCase { + init_options: serde_json::json!({ + "formatting": { + "indent": 6 + } + }), + uri: "file:///test/format-request-options-override.jsonnet", + text: SIMPLE_OBJECT, + tab_size: 3, + insert_spaces: true, + expected: Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { line: 0, character: 0 }, + end: Position { line: 0, character: 5 }, + }, + new_text: "{\n a: 1,\n}\n".to_string(), + }]), +})] +fn test_document_formatting_single_request_cases(#[case] case: SingleFormattingCase) { + let mut session = FormatSession::start(case.init_options).expect("start format session"); + session + .open(case.uri, case.text) + .expect("open document for formatting test"); + let edits = session + .request_formatting(case.uri,
case.tab_size, case.insert_spaces) + .expect("request document formatting"); + assert_eq!(edits, case.expected); + session.shutdown().expect("shutdown format session"); +} + +#[test] +fn test_document_formatting_applies_runtime_formatting_config_changes() { + let mut session = FormatSession::start(serde_json::Value::Null).expect("start format session"); + + let string_uri = "file:///test/format-config-string.jsonnet"; + session + .open(string_uri, "{a:'x'}") + .expect("open string-style document"); + + let before_edits = session + .request_formatting(string_uri, 2, true) + .expect("request formatting before config change"); + assert_eq!( + before_edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "{\n a: 'x',\n}\n".to_string(), + }]) + ); + + session + .change_configuration(serde_json::json!({ + "jsonnet": { + "formatting": { + "indent": 2, + "string_style": "double" + } + } + })) + .expect("apply string_style config change"); + + let after_style_edits = session + .request_formatting(string_uri, 8, false) + .expect("request formatting after string_style change"); + assert_eq!( + after_style_edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "{\n\ta: \"x\",\n}\n".to_string(), + }]) + ); + + let empty_uri = "file:///test/format-config-empty.jsonnet"; + session + .open(empty_uri, "{}") + .expect("open empty object document"); + + session + .change_configuration(serde_json::json!({ + "jsonnet": { + "formatting": { + "pad_objects": false + } + } + })) + .expect("apply pad_objects config change"); + + let after_padding_edits = session + .request_formatting(empty_uri, 4, true) + .expect("request formatting after pad_objects change"); + assert_eq!( + after_padding_edits, + Some(vec![lsp_types::TextEdit { 
+ range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 2, + }, + }, + new_text: "{}\n".to_string(), + }]) + ); + + session.shutdown().expect("shutdown format session"); +} + +#[test] +fn test_document_formatting_respects_lsp_optional_formatting_options() { + let mut session = FormatSession::start(serde_json::Value::Null).expect("start format session"); + let uri = "file:///test/format-request-options.jsonnet"; + session + .open(uri, SIMPLE_OBJECT) + .expect("open formatting options document"); + + let edits = session + .request_formatting_with_options(uri, 2, true, Some(true), Some(false), Some(true)) + .expect("request formatting with options"); + assert_eq!( + edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 5, + }, + }, + new_text: "{\n a: 1,\n}".to_string(), + }]) + ); + + session.shutdown().expect("shutdown format session"); +} + +#[test] +fn test_range_formatting_returns_edit_for_changes_within_requested_range() { + let mut session = FormatSession::start(serde_json::Value::Null).expect("start format session"); + let uri = "file:///test/format-range-contained.jsonnet"; + session + .open(uri, "{\n a: 1,\n b:2,\n}\n") + .expect("open range formatting document"); + + let edits = session + .request_range_formatting_with_options( + uri, + lsp_types::Range { + start: Position { + line: 2, + character: 0, + }, + end: Position { + line: 2, + character: 6, + }, + }, + formatting_options(2, true, None, None, None), + ) + .expect("request range formatting"); + + assert_eq!( + edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 2, + character: 4, + }, + end: Position { + line: 2, + character: 4, + }, + }, + new_text: " ".to_string(), + }]) + ); + + session.shutdown().expect("shutdown format session"); +} + +#[test] +fn 
test_range_formatting_returns_no_edits_when_changes_escape_requested_range() { + let mut session = FormatSession::start(serde_json::Value::Null).expect("start format session"); + let uri = "file:///test/format-range-outside.jsonnet"; + session + .open(uri, "{\n a:1,\n b:2,\n}\n") + .expect("open range formatting document"); + + let edits = session + .request_range_formatting_with_options( + uri, + lsp_types::Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + character: 6, + }, + }, + formatting_options(2, true, None, None, None), + ) + .expect("request range formatting"); + + assert_eq!(edits, Some(Vec::new())); + + session.shutdown().expect("shutdown format session"); +} diff --git a/crates/jrsonnet-lsp/tests/integration/lifecycle.rs b/crates/jrsonnet-lsp/tests/integration/lifecycle.rs new file mode 100644 index 00000000..88cc0e74 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration/lifecycle.rs @@ -0,0 +1,792 @@ +use super::*; + +#[test] +fn test_initialize_shutdown() { + // Create an in-memory connection pair + let (client_conn, server_conn) = Connection::memory(); + + // Run the server in a background thread + let server_thread = run_server(server_conn); + + // Send initialize request + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + + // Receive initialize response + let response = client_conn.receiver.recv().expect("expected success"); + assert_matches!(response, Message::Response(resp) => { + assert_eq!(resp.id, 1.into()); + assert!(resp.error.is_none(), "Initialize should succeed"); + let result = resp.result.expect("should have result"); + let capabilities = result + .get("capabilities") + .expect("should have capabilities"); + assert_eq!( + capabilities.get("documentHighlightProvider"), + Some(&serde_json::Value::Bool(true)), + "document highlight capability should be advertised", + ); + assert_eq!( + capabilities.get("inlayHintProvider"), + 
Some(&serde_json::Value::Bool(true)), + "inlay hint capability should be advertised", + ); + assert_eq!( + capabilities.get("documentRangeFormattingProvider"), + Some(&serde_json::Value::Bool(true)), + "range formatting capability should be advertised", + ); + assert_eq!( + capabilities + .get("codeActionProvider") + .and_then(|provider| provider.get("codeActionKinds")), + Some(&serde_json::json!(["quickfix", "source.fixAll"])), + "code action kinds should be advertised", + ); + assert_eq!( + capabilities + .get("executeCommandProvider") + .and_then(|provider| provider.get("commands")), + Some(&serde_json::json!(["jrsonnet.evalFile"])), + "execute command capability should advertise all command IDs", + ); + assert_eq!( + capabilities + .get("codeLensProvider") + .and_then(|provider| provider.get("resolveProvider")), + Some(&serde_json::Value::Bool(true)), + "code lens resolve capability should be advertised", + ); + assert_eq!( + capabilities.get("declarationProvider"), + Some(&serde_json::Value::Bool(true)), + "declaration capability should be advertised", + ); + assert_eq!( + capabilities.get("implementationProvider"), + Some(&serde_json::Value::Bool(true)), + "implementation capability should be advertised", + ); + let server_name = result + .get("serverInfo") + .and_then(|s| s.get("name")) + .and_then(|n| n.as_str()) + .expect("should have serverInfo.name"); + assert!(server_name.contains("jrsonnet")); + }); + + // Send initialized notification + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + // Send shutdown request + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + + // Receive shutdown response + let response = client_conn.receiver.recv().expect("expected success"); + assert_matches!(response, Message::Response(resp) => { + assert_eq!(resp.id, 2.into()); + assert!(resp.error.is_none(), "Shutdown should succeed"); + }); + + // Send exit 
notification + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + + // Wait for server to exit + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_diagnostics_on_open() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); // ignore response + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + // Open a document with a syntax error + let uri = "file:///test/error.jsonnet"; + let text = "{ a: }"; // Missing value - syntax error + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + // Should receive diagnostics notification + let notification = client_conn.receiver.recv().expect("expected success"); + assert_matches!(notification, Message::Notification(notif) => { + assert_eq!(notif.method, PublishDiagnostics::METHOD); + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).expect("expected success"); + assert!( + !params.diagnostics.is_empty(), + "Should have diagnostics for syntax error" + ); + }); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_diagnostics_refresh_on_did_save_with_text() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + 
.send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/save-refresh.jsonnet"; + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, "{ a: 1 }", + ))) + .expect("expected success"); + let opened = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + assert!(opened.diagnostics.is_empty()); + + client_conn + .sender + .send(Message::Notification(did_save_notification( + uri, + Some("{ a: }"), + ))) + .expect("expected success"); + let saved_invalid = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + assert!( + !saved_invalid.diagnostics.is_empty(), + "saving invalid text should publish diagnostics" + ); + + client_conn + .sender + .send(Message::Notification(did_save_notification( + uri, + Some("{ a: 2 }"), + ))) + .expect("expected success"); + let saved_valid = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + assert!(saved_valid.diagnostics.is_empty()); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_configuration_change_reconfigures_eval_diagnostics() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize with eval diagnostics enabled. 
+ client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "enableEvalDiagnostics": true + }), + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/eval-config-change.jsonnet"; + let text = "error 'boom'"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + let initial_diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + assert!( + initial_diagnostics + .diagnostics + .iter() + .any(|diag| diag.source.as_deref() == Some("jrsonnet-eval")), + "expected eval diagnostics to be present before config change" + ); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "enableEvalDiagnostics": false + } + })), + )) + .expect("expected success"); + + let updated_diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + assert!( + updated_diagnostics + .diagnostics + .iter() + .all(|diag| diag.source.as_deref() != Some("jrsonnet-eval")), + "expected eval diagnostics to be removed after config change" + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + let _ = recv_response(&client_conn, 2); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_lint_and_eval_do_not_duplicate_type_diagnostics() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "enableEvalDiagnostics": true, 
+ "enableLintDiagnostics": true + }), + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/type-dedupe.jsonnet"; + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, + "std.length(1)", + ))) + .expect("expected success"); + + let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + assert_eq!( + diagnostics.diagnostics.len(), + 1, + "expected duplicate eval type diagnostic to be suppressed when lint type diagnostic exists" + ); + let diag = &diagnostics.diagnostics[0]; + assert_eq!(diag.source.as_deref(), Some("jrsonnet-lint")); + assert_eq!( + diag.code, + Some(lsp_types::NumberOrString::String("type-error".to_string())) + ); + assert_eq!( + diag.range, + lsp_types::Range { + start: Position { + line: 0, + character: 11, + }, + end: Position { + line: 0, + character: 12, + }, + } + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_configuration_change_requests_inlay_hint_refresh_when_supported() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request( + initialize_request_with_inlay_hint_refresh_support(1), + )) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + 
"inlayHints": { + "callArguments": "all", + } + } + })), + )) + .expect("expected success"); + + let refresh_request = super::recv_until( + &client_conn, + super::LONG_RESPONSE_TIMEOUT, + |message| match message { + Message::Request(request) if request.method == InlayHintRefreshRequest::METHOD => { + Some(request) + } + _ => None, + }, + ) + .expect("timed out waiting for inlay hint refresh request"); + assert_eq!(refresh_request.params, serde_json::Value::Null); + + client_conn + .sender + .send(Message::Response(lsp_server::Response::new_ok( + refresh_request.id, + serde_json::Value::Null, + ))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_configuration_change_reindexes_closed_import_graph_entries() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_a = tmp.path().join("jpath-a"); + let jpath_b = tmp.path().join("jpath-b"); + let workspace = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_a).expect("jpath-a directory should be created"); + fs::create_dir_all(&jpath_b).expect("jpath-b directory should be created"); + fs::create_dir_all(&workspace).expect("workspace directory should be created"); + + let lib_a_path = jpath_a.join("lib.libsonnet"); + let lib_b_path = jpath_b.join("lib.libsonnet"); + let main_path = workspace.join("main.jsonnet"); + fs::write(&lib_a_path, "{ from: 'a' }").expect("jpath-a lib should be written"); + fs::write(&lib_b_path, "{ from: 'b' }").expect("jpath-b lib should be written"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib"#) + .expect("main should be written"); + + let lib_a_uri = file_uri( + &lib_a_path + .canonicalize() + .expect("lib_a should 
canonicalize"), + ); + let lib_b_uri = file_uri( + &lib_b_path + .canonicalize() + .expect("lib_b should canonicalize"), + ); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + let main_text = fs::read_to_string(&main_path).expect("main text should be readable"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "jpath": [jpath_a.to_string_lossy().to_string()], + }), + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, &main_text, + ))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Notification(did_close_notification(&main_uri))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "jpath": [jpath_b.to_string_lossy().to_string()] + } + })), + )) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_a_uri, + ))) + .expect("expected success"); + let old_target_response = recv_response(&client_conn, 2); + assert!( + old_target_response.error.is_none(), + "findTransitiveImporters for old jpath target should succeed" + ); + assert_eq!( + old_target_response + .result + .expect("should have old target command result"), + serde_json::json!({ + "file": lib_a_uri, + "transitiveImporters": [], + }) + ); + + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 3, &lib_b_uri, + ))) + .expect("expected 
success"); + let new_target_response = recv_response(&client_conn, 3); + assert!( + new_target_response.error.is_none(), + "findTransitiveImporters for new jpath target should succeed" + ); + assert_eq!( + new_target_response + .result + .expect("should have new target command result"), + serde_json::json!({ + "file": lib_b_uri, + "transitiveImporters": [main_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_valid_document_no_errors() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + // Open a valid document + let uri = "file:///test/valid.jsonnet"; + let text = r#"{ hello: "world", answer: 42 }"#; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + // Should receive diagnostics notification with empty diagnostics + let notification = client_conn.receiver.recv().expect("expected success"); + assert_matches!(notification, Message::Notification(notif) => { + assert_eq!(notif.method, PublishDiagnostics::METHOD); + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).expect("expected success"); + assert!( + params.diagnostics.is_empty(), + "Valid document should have no diagnostics" + ); + }); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + 
.expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_initialize_registers_did_change_watched_files_when_supported() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request( + initialize_request_with_dynamic_watched_files(1), + )) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let register_request = super::recv_until( + &client_conn, + super::LONG_RESPONSE_TIMEOUT, + |message| match message { + Message::Request(request) => Some(request), + _ => None, + }, + ) + .expect("timed out waiting for registerCapability request"); + assert_eq!(register_request.method, RegisterCapability::METHOD); + + let actual_params: RegistrationParams = + serde_json::from_value(register_request.params).expect("expected success"); + let expected_options = DidChangeWatchedFilesRegistrationOptions { + watchers: vec![ + FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.jsonnet".to_owned()), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.libsonnet".to_owned()), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.json".to_owned()), + kind: None, + }, + ], + }; + let expected_params = RegistrationParams { + registrations: vec![Registration { + id: "jrsonnet-lsp.did-change-watched-files".to_owned(), + method: DidChangeWatchedFiles::METHOD.to_owned(), + register_options: Some( + serde_json::to_value(expected_options).expect("expected success"), + ), + }], + }; + assert_eq!(actual_params, expected_params); + + client_conn + .sender + 
.send(Message::Response(lsp_server::Response::new_ok( + register_request.id, + serde_json::Value::Null, + ))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_initialize_uses_relative_watch_patterns_when_supported() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root_uri = file_uri(tmp.path()); + let parsed_root_uri: lsp_types::Uri = root_uri.parse().expect("expected success"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request( + initialize_request_with_dynamic_watched_files_relative(1, &root_uri), + )) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let register_request = super::recv_until( + &client_conn, + super::LONG_RESPONSE_TIMEOUT, + |message| match message { + Message::Request(request) => Some(request), + _ => None, + }, + ) + .expect("timed out waiting for registerCapability request"); + assert_eq!(register_request.method, RegisterCapability::METHOD); + + let actual_params: RegistrationParams = + serde_json::from_value(register_request.params).expect("expected success"); + let expected_options = DidChangeWatchedFilesRegistrationOptions { + watchers: vec![ + FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(parsed_root_uri.clone()), + pattern: "**/*.jsonnet".to_owned(), + }), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: 
OneOf::Right(parsed_root_uri.clone()), + pattern: "**/*.libsonnet".to_owned(), + }), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(parsed_root_uri), + pattern: "**/*.json".to_owned(), + }), + kind: None, + }, + ], + }; + let expected_params = RegistrationParams { + registrations: vec![Registration { + id: "jrsonnet-lsp.did-change-watched-files".to_owned(), + method: DidChangeWatchedFiles::METHOD.to_owned(), + register_options: Some( + serde_json::to_value(expected_options).expect("expected success"), + ), + }], + }; + assert_eq!(actual_params, expected_params); + + client_conn + .sender + .send(Message::Response(lsp_server::Response::new_ok( + register_request.id, + serde_json::Value::Null, + ))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/crates/jrsonnet-lsp/tests/integration/mod.rs b/crates/jrsonnet-lsp/tests/integration/mod.rs new file mode 100644 index 00000000..1d08e162 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration/mod.rs @@ -0,0 +1,1127 @@ +//! Integration tests for the LSP server. +//! +//! Uses in-process testing with channels rather than subprocess management, +//! following patterns from ast-grep and simple-completion-language-server. 
+ +use std::{fmt::Write as _, fs, thread, time::Duration}; + +pub(crate) use crate::support::{ + recv_response_by_id, recv_until, LONG_RESPONSE_TIMEOUT, RESPONSE_TIMEOUT, +}; +use assert_matches::assert_matches; +use jrsonnet_lsp_handlers::{SemanticTokenModifierName, SemanticTokenTypeName}; +use lsp_server::{Connection, Message, Notification, Request}; +use lsp_types::{ + notification::{ + Cancel, DidChangeConfiguration, DidChangeWatchedFiles, DidCloseTextDocument, + DidOpenTextDocument, DidSaveTextDocument, Notification as _, PublishDiagnostics, + }, + request::{ + CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, + ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoImplementation, + GotoTypeDefinition, Initialize, InlayHintRefreshRequest, InlayHintRequest, RangeFormatting, + References, RegisterCapability, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, + WorkspaceSymbolRequest, + }, + CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesClientCapabilities, + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, + ExecuteCommandParams, FileChangeType, FileEvent, FileSystemWatcher, GlobPattern, + GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, + InlayHintWorkspaceClientCapabilities, NumberOrString, OneOf, PartialResultParams, Position, + ReferenceContext, ReferenceParams, Registration, RegistrationParams, RelativePattern, + RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, TextDocumentItem, + TextDocumentPositionParams, WorkDoneProgressParams, WorkspaceClientCapabilities, + WorkspaceFolder, +}; +use serde::Deserialize; +use serde_json::json; +use tempfile::TempDir; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct ExpectedSemanticToken { + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: u32, +} + +#[derive(Debug, Clone, 
PartialEq, Eq, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +enum RequestErrorData { + InvalidParams { method: String }, + MethodNotFound { method: String }, + UnknownExecuteCommand { command: String }, + MissingExecuteHandler { command: String }, + RequestCanceled { method: String }, + ServerShuttingDown, + AsyncHandlerFailed { method: String }, + AsyncHandlerPanicked { method: String }, +} + +impl ExpectedSemanticToken { + const fn new( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: u32, + ) -> Self { + Self { + line, + start, + len, + token_type, + modifiers, + } + } +} + +fn semantic_modifiers(modifiers: &[SemanticTokenModifierName]) -> u32 { + modifiers + .iter() + .fold(0_u32, |acc, modifier| acc | modifier.as_bitset()) +} + +fn semantic_token( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: &[SemanticTokenModifierName], +) -> ExpectedSemanticToken { + ExpectedSemanticToken::new(line, start, len, token_type, semantic_modifiers(modifiers)) +} + +fn encode_semantic_tokens(mut tokens: Vec) -> lsp_types::SemanticTokens { + tokens.sort_by_key(|token| (token.line, token.start)); + let mut encoded = Vec::with_capacity(tokens.len()); + let mut prev_line = 0_u32; + let mut prev_start = 0_u32; + for token in tokens { + let delta_line = token.line - prev_line; + let delta_start = if delta_line == 0 { + token.start - prev_start + } else { + token.start + }; + encoded.push(lsp_types::SemanticToken { + delta_line, + delta_start, + length: token.len, + token_type: token.token_type.as_index(), + token_modifiers_bitset: token.modifiers, + }); + prev_line = token.line; + prev_start = token.start; + } + + lsp_types::SemanticTokens { + result_id: None, + data: encoded, + } +} + +/// Helper to create an initialize request. 
+fn initialize_request(id: i32) -> Request { + initialize_request_with_options(id, serde_json::Value::Null) +} + +/// Helper to create an initialize request with custom initialization options. +fn initialize_request_with_options(id: i32, initialization_options: serde_json::Value) -> Request { + let mut params = InitializeParams::default(); + if !initialization_options.is_null() { + params.initialization_options = Some(initialization_options); + } + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create an initialize request with a workspace root URI. +fn initialize_request_with_root_uri(id: i32, root_uri: &str) -> Request { + let mut params = serde_json::to_value(InitializeParams::default()).expect("expected success"); + if let Some(object) = params.as_object_mut() { + object.insert("rootUri".to_string(), serde_json::json!(root_uri)); + } + Request::new(id.into(), Initialize::METHOD.to_string(), params) +} + +/// Helper to create an initialize request that advertises dynamic watched-file +/// registration support. +fn initialize_request_with_dynamic_watched_files(id: i32) -> Request { + let params = InitializeParams { + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { + dynamic_registration: Some(true), + relative_pattern_support: Some(false), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create an initialize request that advertises dynamic watched-file +/// registration support and relative pattern support. 
+fn initialize_request_with_dynamic_watched_files_relative(id: i32, root_uri: &str) -> Request { + let params = InitializeParams { + workspace_folders: Some(vec![WorkspaceFolder { + uri: root_uri.parse().expect("expected success"), + name: "workspace".to_owned(), + }]), + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { + dynamic_registration: Some(true), + relative_pattern_support: Some(true), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create an initialize request that advertises +/// `workspace/inlayHint/refresh` support. +fn initialize_request_with_inlay_hint_refresh_support(id: i32) -> Request { + let params = InitializeParams { + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + inlay_hint: Some(InlayHintWorkspaceClientCapabilities { + refresh_support: Some(true), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a shutdown request. +fn shutdown_request(id: i32) -> Request { + Request::new( + id.into(), + Shutdown::METHOD.to_string(), + serde_json::Value::Null, + ) +} + +/// Helper to create an initialized notification. +fn initialized_notification() -> Notification { + Notification::new("initialized".to_string(), json!({})) +} + +/// Helper to create an exit notification. 
+fn exit_notification() -> Notification { + Notification::new("exit".to_string(), json!({})) +} + +/// Helper to create a $/cancelRequest notification. +fn cancel_request_notification(request_id: i32) -> Notification { + let params = CancelParams { + id: NumberOrString::Number(request_id), + }; + Notification::new( + Cancel::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a didOpen notification. +fn did_open_notification(uri: &str, text: &str) -> Notification { + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: uri.parse().expect("expected success"), + language_id: "jsonnet".to_string(), + version: 1, + text: text.to_string(), + }, + }; + Notification::new( + DidOpenTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn did_save_notification(uri: &str, text: Option<&str>) -> Notification { + let params = DidSaveTextDocumentParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + text: text.map(ToString::to_string), + }; + Notification::new( + DidSaveTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn did_close_notification(uri: &str) -> Notification { + let params = DidCloseTextDocumentParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + }; + Notification::new( + DidCloseTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a goto definition request. 
+fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoDefinition::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn goto_type_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoTypeDefinition::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn assert_type_definition_matches_definition( + conn: &Connection, + definition_id: i32, + type_definition_id: i32, + uri: &str, + line: u32, + character: u32, +) -> Option { + conn.sender + .send(Message::Request(goto_definition_request( + definition_id, + uri, + line, + character, + ))) + .expect("expected success"); + let definition_response = recv_response(conn, definition_id); + assert!( + definition_response.error.is_none(), + "Goto definition request should succeed" + ); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); + + conn.sender + .send(Message::Request(goto_type_definition_request( + type_definition_id, + uri, + line, + character, + ))) + .expect("expected success"); + let 
type_definition_response = recv_response(conn, type_definition_id); + assert!( + type_definition_response.error.is_none(), + "Goto type definition request should succeed" + ); + let type_definition_result: Option = + serde_json::from_value(type_definition_response.result.expect("should have result")) + .expect("expected success"); + + assert_eq!( + type_definition_result, definition_result, + "typeDefinition should match definition for Jsonnet symbol navigation" + ); + definition_result +} + +fn send_goto_and_parse( + conn: &Connection, + id: i32, + label: &str, + request: Request, +) -> Option { + conn.sender + .send(Message::Request(request)) + .expect("expected success"); + let response = recv_response(conn, id); + assert!(response.error.is_none(), "{label} request should succeed"); + serde_json::from_value(response.result.expect("should have result")).expect("expected success") +} + +/// Helper to create a goto declaration request. +fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoDeclaration::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn goto_implementation_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + 
}; + Request::new( + id.into(), + GotoImplementation::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a references request. +fn references_request( + id: i32, + uri: &str, + line: u32, + character: u32, + include_declaration: bool, +) -> Request { + let params = ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + context: ReferenceContext { + include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + References::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn document_highlight_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = lsp_types::DocumentHighlightParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + DocumentHighlightRequest::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn rename_request(id: i32, uri: &str, line: u32, character: u32, new_name: &str) -> Request { + let params = RenameParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + new_name: new_name.to_string(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + Rename::METHOD.to_string(), + 
serde_json::to_value(params).expect("expected success"), + ) +} + +fn inlay_hint_request( + id: i32, + uri: &str, + start_line: u32, + start_character: u32, + end_line: u32, + end_character: u32, +) -> Request { + let params = lsp_types::InlayHintParams { + work_done_progress_params: WorkDoneProgressParams::default(), + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + range: lsp_types::Range { + start: Position { + line: start_line, + character: start_character, + }, + end: Position { + line: end_line, + character: end_character, + }, + }, + }; + Request::new( + id.into(), + InlayHintRequest::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn formatting_request(id: i32, uri: &str, tab_size: u32, insert_spaces: bool) -> Request { + formatting_request_with_options(id, uri, tab_size, insert_spaces, None, None, None) +} + +fn formatting_options( + tab_size: u32, + insert_spaces: bool, + trim_trailing_whitespace: Option, + insert_final_newline: Option, + trim_final_newlines: Option, +) -> lsp_types::FormattingOptions { + lsp_types::FormattingOptions { + tab_size, + insert_spaces, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace, + insert_final_newline, + trim_final_newlines, + } +} + +fn formatting_request_with_options( + id: i32, + uri: &str, + tab_size: u32, + insert_spaces: bool, + trim_trailing_whitespace: Option, + insert_final_newline: Option, + trim_final_newlines: Option, +) -> Request { + let params = lsp_types::DocumentFormattingParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + options: formatting_options( + tab_size, + insert_spaces, + trim_trailing_whitespace, + insert_final_newline, + trim_final_newlines, + ), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + Formatting::METHOD.to_string(), + serde_json::to_value(params).expect("expected 
success"), + ) +} + +fn range_formatting_request_with_options( + id: i32, + uri: &str, + range: lsp_types::Range, + options: lsp_types::FormattingOptions, +) -> Request { + let params = lsp_types::DocumentRangeFormattingParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + range, + options, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + RangeFormatting::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn semantic_tokens_range_request( + id: i32, + uri: &str, + start_line: u32, + start_character: u32, + end_line: u32, + end_character: u32, +) -> Request { + let params = SemanticTokensRangeParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + range: lsp_types::Range { + start: Position { + line: start_line, + character: start_character, + }, + end: Position { + line: end_line, + character: end_character, + }, + }, + }; + Request::new( + id.into(), + SemanticTokensRangeRequest::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn code_action_request( + id: i32, + uri: &str, + range: lsp_types::Range, + diagnostics: Vec, + only: Option>, +) -> Request { + let params = lsp_types::CodeActionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + range, + context: lsp_types::CodeActionContext { + diagnostics, + only, + trigger_kind: None, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + CodeActionRequest::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn code_lens_request(id: i32, uri: &str) -> Request { + let params = 
lsp_types::CodeLensParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + CodeLensRequest::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn code_lens_resolve_request(id: i32, lens: lsp_types::CodeLens) -> Request { + Request::new( + id.into(), + CodeLensResolve::METHOD.to_string(), + serde_json::to_value(lens).expect("expected success"), + ) +} + +fn did_change_watched_files_notification(changes: Vec) -> Notification { + let params = DidChangeWatchedFilesParams { changes }; + Notification::new( + DidChangeWatchedFiles::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn did_change_configuration_notification(settings: serde_json::Value) -> Notification { + let params = DidChangeConfigurationParams { settings }; + Notification::new( + DidChangeConfiguration::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn execute_command_request(id: i32, command: &str, arguments: Vec) -> Request { + let params = ExecuteCommandParams { + command: command.to_string(), + arguments, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + ExecuteCommand::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn custom_eval_file_request(id: i32, uri: &str) -> Request { + Request::new( + id.into(), + "jrsonnet/evalFile".to_string(), + serde_json::json!({ + "textDocument": { + "uri": uri, + }, + }), + ) +} + +fn custom_eval_expression_request(id: i32, expression: &str, base_uri: Option<&str>) -> Request { + let mut params = serde_json::json!({ + "expression": expression, + }); + if let Some(base_uri) = base_uri { + params + .as_object_mut() + .expect("evalExpression params should be an 
object") + .insert( + "baseDocument".to_string(), + serde_json::json!({ + "uri": base_uri, + }), + ); + } + + Request::new(id.into(), "jrsonnet/evalExpression".to_string(), params) +} + +fn custom_find_transitive_importers_request(id: i32, uri: &str) -> Request { + Request::new( + id.into(), + "jrsonnet/findTransitiveImporters".to_string(), + serde_json::json!({ + "textDocument": { + "uri": uri, + }, + }), + ) +} + +fn workspace_symbol_request(id: i32, query: &str) -> Request { + let params = lsp_types::WorkspaceSymbolParams { + query: query.to_string(), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + WorkspaceSymbolRequest::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn request_workspace_symbols( + conn: &Connection, + id: i32, + query: &str, +) -> Option> { + conn.sender + .send(Message::Request(workspace_symbol_request(id, query))) + .expect("expected success"); + let response = recv_response(conn, id); + assert!(response.error.is_none(), "workspace/symbol should succeed"); + serde_json::from_value( + response + .result + .expect("workspace/symbol should return result"), + ) + .expect("expected success") +} + +fn code_action_test_range() -> lsp_types::Range { + lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 20, + }, + } +} + +fn unused_variable_diagnostic() -> lsp_types::Diagnostic { + lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: 
None, + } +} + +fn request_code_actions( + conn: &Connection, + id: i32, + uri: &str, + diagnostics: Vec, + only: Option>, +) -> Option> { + conn.sender + .send(Message::Request(code_action_request( + id, + uri, + code_action_test_range(), + diagnostics, + only, + ))) + .expect("expected success"); + let response = recv_response(conn, id); + assert!(response.error.is_none(), "Code action should succeed"); + serde_json::from_value(response.result.expect("should have result")).expect("expected success") +} + +fn expected_unused_variable_quickfix( + uri: &str, + diagnostic: lsp_types::Diagnostic, +) -> Vec { + let parsed_uri: lsp_types::Uri = uri.parse().expect("expected success"); + let mut prefix_changes = std::collections::HashMap::new(); + prefix_changes.insert( + parsed_uri.clone(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + ); + let mut remove_changes = std::collections::HashMap::new(); + remove_changes.insert( + parsed_uri.clone(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 11, + }, + }, + new_text: String::new(), + }], + ); + let mut fix_all_changes = std::collections::HashMap::new(); + fix_all_changes.insert( + parsed_uri, + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 11, + }, + }, + new_text: String::new(), + }], + ); + + vec![ + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(prefix_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + 
is_preferred: Some(true), + disabled: None, + data: None, + }), + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(remove_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(fix_all_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] +} + +fn expected_unused_import_binding_actions( + uri: &str, + diagnostic: lsp_types::Diagnostic, +) -> Vec { + let parsed_uri: lsp_types::Uri = uri.parse().expect("expected success"); + let mut prefix_changes = std::collections::HashMap::new(); + prefix_changes.insert( + parsed_uri.clone(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + ); + let mut remove_changes = std::collections::HashMap::new(); + remove_changes.insert( + parsed_uri.clone(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 32, + }, + }, + new_text: String::new(), + }], + ); + let mut fix_all_changes = std::collections::HashMap::new(); + fix_all_changes.insert( + parsed_uri, + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 
32, + }, + }, + new_text: String::new(), + }], + ); + + vec![ + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(prefix_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(remove_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(fix_all_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] +} + +fn location(uri: &str, start_character: u32, end_character: u32) -> lsp_types::Location { + lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: start_character, + }, + end: Position { + line: 0, + character: end_character, + }, + }, + } +} + +fn expected_find_references(uri: &str, include_declaration: bool) -> Vec { + let mut references = Vec::with_capacity(if include_declaration { 3 } else { 2 }); + if include_declaration { + references.push(location(uri, 6, 7)); + } + references.push(location(uri, 13, 14)); + 
references.push(location(uri, 17, 18)); + references +} + +fn file_uri(path: &std::path::Path) -> String { + format!("file://{}", path.to_string_lossy()) +} + +fn recv_response(conn: &Connection, expected_id: i32) -> lsp_server::Response { + recv_response_by_id(conn, expected_id.into(), LONG_RESPONSE_TIMEOUT) + .expect("timed out waiting for response") +} + +fn parse_request_error_data(error: &lsp_server::ResponseError) -> RequestErrorData { + let data = error + .data + .clone() + .expect("response error should include structured data"); + serde_json::from_value(data).expect("response error data should decode") +} + +fn recv_publish_diagnostics_for_uri( + conn: &Connection, + uri: &str, + timeout: Duration, +) -> lsp_types::PublishDiagnosticsParams { + recv_until(conn, timeout, |message| { + let Message::Notification(notif) = message else { + return None; + }; + if notif.method != PublishDiagnostics::METHOD { + return None; + } + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).expect("expected success"); + (params.uri.as_str() == uri).then_some(params) + }) + .expect("expected diagnostics notification") +} + +/// Run the server with the given connection in a separate thread. 
+fn run_server(connection: Connection) -> thread::JoinHandle<()> { + thread::spawn(move || { + let server = jrsonnet_lsp::server::Server::new(connection); + let _ = server.run(); + }) +} + +mod features; +mod formatting; +mod lifecycle; +mod navigation; +mod workspace_cross_file; diff --git a/crates/jrsonnet-lsp/tests/integration/navigation.rs b/crates/jrsonnet-lsp/tests/integration/navigation.rs new file mode 100644 index 00000000..b00c4744 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration/navigation.rs @@ -0,0 +1,1100 @@ +use super::*; + +#[test] +fn test_goto_definition() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + // Open a document with a local binding + let uri = "file:///test/definition.jsonnet"; + let text = r"local x = 1; x + 1"; + // ^^^^^^ def ^ use at position (0, 13) + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + + // Receive diagnostics notification (discard) + let _ = client_conn.receiver.recv().expect("expected success"); + + // Send goto definition request for 'x' usage at position (0, 13) + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 0, 13))) + .expect("expected success"); + + // Should receive definition response + let response = client_conn.receiver.recv().expect("expected success"); + let response = assert_matches!(response, Message::Response(resp) => resp); + assert_eq!(response.id, 2.into()); + assert!(response.error.is_none(), "Goto definition should succeed"); + let result: Option = + serde_json::from_value(response.result.expect("should have result")) + 
.expect("expected success"); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_type_definition() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/type-definition.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(goto_type_definition_request( + 2, uri, 0, 13, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Goto type definition request should succeed" + ); + let result: Option = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: 
Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_type_definition_matches_definition_for_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/type-definition-local-alias.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + let result = assert_type_definition_matches_definition(&client_conn, 2, 3, uri, 2, 0); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_type_definition_matches_definition_for_import_targets() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + 
let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +local plain = lib; +alias + std.length(plain)"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, RESPONSE_TIMEOUT); + + // `alias` usage at line 3, col 0 resolves to imported field `foo`. + let alias_result = assert_type_definition_matches_definition(&client_conn, 2, 3, &uri, 3, 0); + assert_eq!( + alias_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + // `plain` usage at line 3, col 19 resolves to import file root. 
+ let plain_result = assert_type_definition_matches_definition(&client_conn, 4, 5, &uri, 3, 19); + assert_eq!( + plain_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .expect("expected success"); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_navigation_matrix_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/navigation-matrix-local.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + let declaration = send_goto_and_parse( + &client_conn, + 2, + "goto declaration", + goto_declaration_request(2, uri, 2, 0), + ); + let definition = send_goto_and_parse( + &client_conn, + 3, + "goto definition", + goto_definition_request(3, uri, 2, 0), + ); + let type_definition = send_goto_and_parse( + &client_conn, + 4, + "goto type definition", + goto_type_definition_request(4, uri, 2, 0), + ); + let implementation = send_goto_and_parse( + &client_conn, + 5, + "goto implementation", + goto_implementation_request(5, uri, 2, 0), + ); + + assert_eq!( + declaration, + 
Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + })) + ); + assert_eq!( + definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + assert_eq!(type_definition, definition); + assert_eq!( + implementation, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 1, + character: 10, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .expect("expected success"); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_navigation_matrix_import_alias() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + 
.send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, RESPONSE_TIMEOUT); + + let declaration = send_goto_and_parse( + &client_conn, + 2, + "goto declaration", + goto_declaration_request(2, &uri, 2, 0), + ); + let definition = send_goto_and_parse( + &client_conn, + 3, + "goto definition", + goto_definition_request(3, &uri, 2, 0), + ); + let type_definition = send_goto_and_parse( + &client_conn, + 4, + "goto type definition", + goto_type_definition_request(4, &uri, 2, 0), + ); + let implementation = send_goto_and_parse( + &client_conn, + 5, + "goto implementation", + goto_implementation_request(5, &uri, 2, 0), + ); + + assert_eq!( + declaration, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + assert_eq!( + definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + assert_eq!(type_definition, definition); + assert_eq!( + implementation, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 21, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .expect("expected success"); + let _ = recv_response(&client_conn, 6); 
+ client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_declaration() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/declaration.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(2, uri, 0, 13))) + .expect("expected success"); + + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Goto declaration request should succeed" + ); + let result: Option = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_implementation_local_binding() { + let (client_conn, server_conn) = Connection::memory(); + let 
server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/implementation-local.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 0, 13))) + .expect("expected success"); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_implementation_request(3, uri, 0, 13))) + .expect("expected success"); + let implementation_response = recv_response(&client_conn, 3); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 10, + }, + end: Position { + line: 0, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 
4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_definition_and_declaration_diverge_for_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/alias-definition-vs-declaration.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 2, 0))) + .expect("expected success"); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, uri, 2, 0))) + .expect("expected success"); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + serde_json::from_value(declaration_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + 
uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_implementation_import_field() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib.foo"#) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) + .expect("expected success"); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + 
definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, &uri, 0, 40))) + .expect("expected success"); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + serde_json::from_value(declaration_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_implementation_request( + 4, &uri, 0, 40, + ))) + .expect("expected success"); + let implementation_response = recv_response(&client_conn, 4); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 9, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .expect("expected success"); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_definition_alias_to_import_field_vs_declaration() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + 
let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 2, 0))) + .expect("expected success"); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, &uri, 2, 0))) + .expect("expected success"); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + serde_json::from_value(declaration_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + 
declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_implementation_request(4, &uri, 2, 0))) + .expect("expected success"); + let implementation_response = recv_response(&client_conn, 4); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().expect("expected success"), + range: lsp_types::Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 21, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .expect("expected success"); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_diagnostics_import_file_and_definition_resolution() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib.foo"#) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); 
+ + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .expect("expected success"); + + let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, &uri, RESPONSE_TIMEOUT); + assert_eq!(diagnostics.uri.as_str(), uri); + assert!( + diagnostics.diagnostics.is_empty(), + "import-backed file should have no diagnostics" + ); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Goto definition should succeed"); + let result: Option = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs b/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs new file mode 100644 index 00000000..ce7b263b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs @@ -0,0 +1,1180 @@ +use super::*; + +#[test] +fn test_watched_file_refreshes_unopened_importers_for_references() { + let tmp = TempDir::new().expect("tempdir 
should be created"); + let lib1_path = tmp.path().join("lib1.jsonnet"); + let lib2_path = tmp.path().join("lib2.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + fs::write(&lib1_path, "local target = 1; target").expect("lib1 should be written"); + fs::write(&lib2_path, "local target = 2; target").expect("lib2 should be written"); + fs::write(&main_path, "local lib = import 'lib1.jsonnet'; lib.target") + .expect("main should be written"); + + let lib1_uri = file_uri(&lib1_path.canonicalize().expect("lib1 should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + // Open lib1 (current document for references requests) + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib1_uri, + "local target = 1; target", + ))) + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); // initial diagnostics + + // Index unopened main file via watched-files notification + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + uri: main_uri.parse().expect("expected success"), + typ: FileChangeType::CREATED, + }]), + )) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 20, &lib1_uri, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 20); + assert!(response.error.is_none(), "Command should succeed"); + let command_result = response.result.expect("command should return result"); + let 
importers = command_result + .get("transitiveImporters") + .expect("transitiveImporters key should exist") + .as_array() + .expect("transitiveImporters should be an array") + .iter() + .filter_map(|value| value.as_str()) + .collect::>(); + assert_eq!(importers, vec![main_uri.as_str()]); + + // Query references to `target` definition in lib1 (line 0, col 6) + client_conn + .sender + .send(Message::Request(references_request( + 2, &lib1_uri, 0, 6, false, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let refs = refs.unwrap_or_default(); + assert!( + refs.iter() + .any(|location| location.uri.to_string() == main_uri), + "Expected cross-file reference from unopened main file, got: {refs:?}" + ); + + // Query references from a non-definition reference in lib1 (line 0, col 18) + client_conn + .sender + .send(Message::Request(references_request( + 21, &lib1_uri, 0, 18, false, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 21); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let refs = refs.unwrap_or_default(); + assert!( + refs.iter() + .any(|location| location.uri.to_string() == main_uri), + "Expected cross-file reference from unopened main file when queried from a local reference, got: {refs:?}" + ); + + // Update main on disk to import lib2 instead of lib1 + fs::write(&main_path, "local lib = import 'lib2.jsonnet'; lib.target") + .expect("main should be rewritten"); + + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + uri: main_uri.parse().expect("expected success"), + typ: FileChangeType::CHANGED, + }]), 
+ )) + .expect("expected success"); + + // References to lib1 target should no longer include main + client_conn + .sender + .send(Message::Request(references_request( + 3, &lib1_uri, 0, 6, false, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 3); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let refs = refs.unwrap_or_default(); + assert!( + !refs + .iter() + .any(|location| location.uri.to_string() == main_uri), + "Main should no longer reference lib1 after watched-file update" + ); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_initialize_bootstraps_workspace_import_graph() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + fs::write(&lib_path, "{ value: 1 }").expect("lib should be written"); + fs::write(&main_path, "local lib = import 'lib.jsonnet'; lib.value") + .expect("main should be written"); + + let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_root_uri( + 1, &root_uri, + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + 
.sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let expected_result = json!({ + "file": lib_uri, + "transitiveImporters": [main_uri], + }); + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_uri, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "jrsonnet/findTransitiveImporters request should succeed" + ); + let actual_result = response.result.expect("request should return result"); + assert_eq!(actual_result, expected_result); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_initialize_bootstrap_large_workspace_startup() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + fs::write(&lib_path, "{ value: 1 }").expect("lib should be written"); + + const FILE_COUNT: usize = 300; + let mut expected_importers = std::collections::BTreeSet::new(); + for idx in 0..FILE_COUNT { + let file_path = tmp.path().join(format!("svc_{idx}.jsonnet")); + fs::write( + &file_path, + format!("local lib = import 'lib.jsonnet'; {{ name: 'svc-{idx}', value: lib.value }}"), + ) + .expect("workspace file should be written"); + let file_uri = file_uri(&file_path.canonicalize().expect("file should canonicalize")); + expected_importers.insert(file_uri.to_string()); + } + + let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + 
client_conn + .sender + .send(Message::Request(initialize_request_with_root_uri( + 1, &root_uri, + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_uri, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "jrsonnet/findTransitiveImporters request should succeed" + ); + let result = response.result.expect("request should return result"); + let importers = result + .get("transitiveImporters") + .expect("transitiveImporters key should exist") + .as_array() + .expect("transitiveImporters should be an array"); + let actual_importers = importers + .iter() + .filter_map(|value| value.as_str().map(ToOwned::to_owned)) + .collect::>(); + + assert_eq!(actual_importers, expected_importers); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_workspace_symbol_includes_unopened_workspace_files() { + let tmp = TempDir::new().expect("tempdir should be created"); + let closed_path = tmp.path().join("closed.jsonnet"); + let closed_text = "local workspaceOnly=1;workspaceOnly"; + fs::write(&closed_path, closed_text).expect("closed file should be written"); + + let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); + let closed_uri = file_uri( + &closed_path + .canonicalize() + .expect("closed should canonicalize"), + ); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + 
.sender + .send(Message::Request(initialize_request_with_root_uri( + 1, &root_uri, + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let symbols = request_workspace_symbols(&client_conn, 2, "workspaceOnly"); + + let expected_doc = jrsonnet_lsp_document::Document::new( + closed_text.to_string(), + jrsonnet_lsp_document::DocVersion::new(0), + ); + let expected_uri: lsp_types::Uri = closed_uri.parse().expect("expected success"); + let expected_symbols = Some(jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "workspaceOnly", + )); + assert_eq!(symbols, expected_symbols); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_workspace_symbol_ranks_exact_prefix_then_substring() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/workspace-symbol-ranking.jsonnet"; + let text = + "local needle = 1; local has_needle_inside = 2; local needlePrefix = 3; local zneedle = 4; needle"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + let symbols = request_workspace_symbols(&client_conn, 2, "needle"); + let 
expected_doc = jrsonnet_lsp_document::Document::new( + text.to_string(), + jrsonnet_lsp_document::DocVersion::new(1), + ); + let expected_uri: lsp_types::Uri = uri.parse().expect("expected success"); + let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "needle", + ); + let expected_symbols = vec![ + expected_all + .iter() + .find(|symbol| symbol.name == "needle") + .expect("expected exact match symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "needlePrefix") + .expect("expected prefix match symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "zneedle") + .expect("expected shorter substring symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "has_needle_inside") + .expect("expected longer substring symbol") + .clone(), + ]; + assert_eq!(symbols, Some(expected_symbols)); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_workspace_symbol_caps_results_with_deterministic_order() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/workspace-symbol-cap.jsonnet"; + let text = { + let locals = (0..140) + .rev() + .map(|idx| format!("local capsymbol{idx:03} = {idx};")) + .collect::>() + .join(" "); + format!("{locals} capsymbol000") + }; + client_conn + .sender + 
.send(Message::Notification(did_open_notification(uri, &text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + + let symbols = request_workspace_symbols(&client_conn, 2, "capsymbol"); + let expected_doc = + jrsonnet_lsp_document::Document::new(text, jrsonnet_lsp_document::DocVersion::new(1)); + let expected_uri: lsp_types::Uri = uri.parse().expect("expected success"); + let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "capsymbol", + ); + let expected_symbols = (0..128) + .map(|idx| format!("capsymbol{idx:03}")) + .map(|name| { + expected_all + .iter() + .find(|symbol| symbol.name == name) + .expect("expected symbol to exist") + .clone() + }) + .collect::>(); + assert_eq!(symbols, Some(expected_symbols)); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_find_transitive_importers_returns_sorted_uris() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let a_path = tmp.path().join("a.jsonnet"); + let b_path = tmp.path().join("b.jsonnet"); + fs::write(&lib_path, "{ target: 1 }").expect("lib should be written"); + fs::write(&a_path, "local lib = import 'lib.jsonnet'; lib.target") + .expect("a should be written"); + fs::write(&b_path, "local lib = import 'lib.jsonnet'; lib.target") + .expect("b should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let a_uri = file_uri(&a_path.canonicalize().expect("a should canonicalize")); + let b_uri = file_uri(&b_path.canonicalize().expect("b should canonicalize")); + + let (client_conn, 
server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + for (uri, text) in [ + (&lib_uri, "{ target: 1 }"), + (&a_uri, "local lib = import 'lib.jsonnet'; lib.target"), + (&b_uri, "local lib = import 'lib.jsonnet'; lib.target"), + ] { + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); + } + + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_uri, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Command should succeed"); + assert_eq!( + response.result.expect("command should return result"), + json!({ + "file": lib_uri, + "transitiveImporters": [a_uri, b_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_did_close_preserves_import_graph_for_references() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + let lib_text = "local target = 1; target"; + let main_text = "local lib = import 'lib.jsonnet'; lib.target"; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = 
file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Notification(did_close_notification(&main_uri))) + .expect("expected success"); + let closed_diagnostics = + recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); + assert_eq!( + closed_diagnostics, + lsp_types::PublishDiagnosticsParams { + uri: main_uri.parse().expect("expected success"), + diagnostics: Vec::new(), + version: None, + } + ); + + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_uri, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Command should succeed"); + let transitive_importers = response.result.expect("command should return result"); + assert_eq!( + transitive_importers, + json!({ + "file": lib_uri, + "transitiveImporters": [main_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(references_request( + 3, &lib_uri, 0, 6, false, + ))) + .expect("expected success"); + let response = 
recv_response(&client_conn, 3); + assert!(response.error.is_none(), "References should succeed"); + let references: Option> = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let references = references.unwrap_or_default(); + assert_eq!( + references, + vec![location(&lib_uri, 18, 24), location(&main_uri, 38, 44)] + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .expect("expected success"); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cross_file_rename_updates_definition_and_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ helper: function(x) x * 2 }").expect("lib should be written"); + fs::write( + &main_path, + "local lib = import 'lib.jsonnet'; lib.helper(1) + lib.helper(2)", + ) + .expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, + "{ helper: function(x) x * 2 }", + ))) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + 
uri: main_uri.parse().expect("expected success"), + typ: FileChangeType::CREATED, + }]), + )) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Rename should succeed"); + + let edit: Option = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let edit = edit.expect("rename should produce workspace edit"); + let changes = edit.changes.expect("workspace edit should include changes"); + + let lib_edits = changes + .iter() + .find_map(|(uri, edits)| (uri.as_str() == lib_uri).then_some(edits)) + .expect("lib file should be edited"); + assert_eq!( + lib_edits, + &vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 2, + }, + end: lsp_types::Position { + line: 0, + character: 8, + }, + }, + new_text: "util".to_string(), + }], + "lib should have one definition rename edit", + ); + + let main_edits = changes + .iter() + .find_map(|(uri, edits)| (uri.as_str() == main_uri).then_some(edits)) + .expect("main importer should be edited"); + assert_eq!( + main_edits, + &vec![ + lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 38, + }, + end: lsp_types::Position { + line: 0, + character: 44, + }, + }, + new_text: "util".to_string(), + }, + lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 54, + }, + end: lsp_types::Position { + line: 0, + character: 60, + }, + }, + new_text: "util".to_string(), + }, + ], + "main should rename both helper references", + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + 
.expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cross_file_references_resolve_jpath_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "local target = 1; target"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.target"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": [jpath_dir.to_string_lossy().to_string()], + }), + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + 
.send(Message::Request(references_request( + 2, &lib_uri, 0, 6, false, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "References should succeed"); + let references: Option> = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + assert_eq!( + references.unwrap_or_default(), + vec![location(&lib_uri, 18, 24), location(&main_uri, 40, 46)] + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cross_file_rename_updates_jpath_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "{ helper: function(x) x * 2 }"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.helper(1) + lib.helper(2)"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": 
[jpath_dir.to_string_lossy().to_string()], + }), + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Rename should succeed"); + + let edit: Option = + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); + let edit = edit.expect("rename should produce workspace edit"); + + let mut expected_changes = std::collections::HashMap::new(); + expected_changes.insert( + lib_uri.parse().expect("expected success"), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 8, + }, + }, + new_text: "util".to_string(), + }], + ); + expected_changes.insert( + main_uri.parse().expect("expected success"), + vec![ + lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 40, + }, + end: Position { + line: 0, + character: 46, + }, + }, + new_text: "util".to_string(), + }, + lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 56, + }, + end: Position { + line: 0, + character: 62, + }, + }, + new_text: "util".to_string(), + }, + ], + ); + assert_eq!( + edit, + 
lsp_types::WorkspaceEdit { + changes: Some(expected_changes), + document_changes: None, + change_annotations: None, + } + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .expect("expected success"); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_navigation_resolves_jpath_imports_from_graph() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "{ helper: 42 }"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.helper"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": [jpath_dir.to_string_lossy().to_string()], + }), + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .expect("expected success"); + let _ = 
recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .expect("expected success"); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); + + client_conn + .sender + .send(Message::Request(goto_definition_request( + 2, &main_uri, 0, 22, + ))) + .expect("expected success"); + let import_definition_response = recv_response(&client_conn, 2); + assert!( + import_definition_response.error.is_none(), + "goto definition on import path should succeed" + ); + let import_definition: Option = serde_json::from_value( + import_definition_response + .result + .expect("should have goto definition result"), + ) + .expect("expected success"); + assert_eq!( + import_definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.parse().expect("lib URI should parse"), + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 14, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_definition_request( + 3, &main_uri, 0, 40, + ))) + .expect("expected success"); + let definition_response = recv_response(&client_conn, 3); + assert!( + definition_response.error.is_none(), + "goto definition should succeed" + ); + let definition: Option = serde_json::from_value( + definition_response + .result + .expect("should have definition result"), + ) + .expect("expected success"); + assert_eq!( + definition, + Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request( + 4, &main_uri, 0, 40, + ))) + .expect("expected success"); + let declaration_response = recv_response(&client_conn, 4); + assert!( + declaration_response.error.is_none(), + "goto declaration should succeed" + ); + let declaration: Option = serde_json::from_value( + 
declaration_response + .result + .expect("should have declaration result"), + ) + .expect("expected success"); + assert_eq!( + declaration, + Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + client_conn + .sender + .send(Message::Request(goto_type_definition_request( + 5, &main_uri, 0, 40, + ))) + .expect("expected success"); + let type_definition_response = recv_response(&client_conn, 5); + assert!( + type_definition_response.error.is_none(), + "goto type definition should succeed" + ); + let type_definition: Option = serde_json::from_value( + type_definition_response + .result + .expect("should have type definition result"), + ) + .expect("expected success"); + assert_eq!( + type_definition, + Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .expect("expected success"); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs new file mode 100644 index 00000000..d37fb195 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -0,0 +1,2 @@ +mod integration; +mod support; diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml new file mode 100644 index 00000000..a7983d32 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml @@ -0,0 +1,37 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) x == null || std.isNumber(x), xs)); + local zs = 
[x for x in xs if x != null]; + local ys = [(if x == null then "no" else x - 1) for x in xs]; + { zs: ((m1:|))zs, ys: ((m2:|))ys }; + + f([1, null, 2]) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local zs = [x for x in xs if x != null];" +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local ys = [(if x == null then \"no\" else x - 1) for x in xs];" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml new file mode 100644 index 00000000..f64dc453 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml @@ -0,0 +1,40 @@ +steps: +- step: create + files: + main.jsonnet: | + local inc(x) = + assert std.isNumber(x); + x + 1; + + local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); + local ys = std.filterMap(std.isNumber, inc, ((m1:|))xs); + ((m2:|))ys + + f([1, "x", 2]) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local f(xs) =\n assert std.isArray(xs);\n assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs));\n local ys = std.filterMap(std.isNumber, inc, xs);\n ys\n..." 
+- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local ys = std.filterMap(std.isNumber, inc, xs);" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml new file mode 100644 index 00000000..49f75cad --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml @@ -0,0 +1,36 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); + local ys = std.filter(std.isNumber, ((m1:|))xs); + ((m2:|))ys + + f([1, "x", 2]) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local f(xs) =\n assert std.isArray(xs);\n assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs));\n local ys = std.filter(std.isNumber, xs);\n ys\n..." 
+- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local ys = std.filter(std.isNumber, xs);" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml new file mode 100644 index 00000000..850916bd --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(arr) = + if std.all(std.map(std.isNumber, arr)) then + ((m1:|))arr + else + arr; + + f([1, 2, 3]) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local f(arr) =\n if std.all(std.map(std.isNumber, arr)) then\n arr\n else\n arr;" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml new file mode 100644 index 00000000..4c14aea7 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml @@ -0,0 +1,28 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert std.isString(x); + if x == "hi" then + "hey" + else if x == "bye" then + "see ya" + else + ((m1:|))x + + std.length(f("hello")) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isString(x);\n if x == \"hi\" 
then\n \"hey\"\n else if x == \"bye\" then\n..." diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml new file mode 100644 index 00000000..07591e7e --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml @@ -0,0 +1,49 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if x == "hi" then + std.length(((m1:|))x) + else if x == "bye" then + std.length(((m2:|))x) + else + ((m3:|))x + + std.length(f("hello")) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "\"hi\"" + - preview: + language: "jsonnet" + value: "local f(x) =\n if x == \"hi\" then\n std.length(x)\n else if x == \"bye\" then\n std.length(x)\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "\"bye\"" + - preview: + language: "jsonnet" + value: "local f(x) =\n if x == \"hi\" then\n std.length(x)\n else if x == \"bye\" then\n std.length(x)\n..." +- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "any" + - preview: + language: "jsonnet" + value: "local f(x) =\n if x == \"hi\" then\n std.length(x)\n else if x == \"bye\" then\n std.length(x)\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml new file mode 100644 index 00000000..010b6b95 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml @@ -0,0 +1,37 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert std.isNumber(x) || std.isString(x); + if !std.isNumber(x) then + std.length(((m1:|))x) + else + ((m2:|))x + 1 + + f(3) + f("hi") + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isNumber(x) || std.isString(x);\n if !std.isNumber(x) then\n std.length(x)\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isNumber(x) || std.isString(x);\n if !std.isNumber(x) then\n std.length(x)\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml new file mode 100644 index 00000000..07ee1879 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml @@ -0,0 +1,26 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert x == null || std.isString(x); + if x != null && std.length(x) >= 10 then + ((m1:|))x + else + "Hi"; + + f(null) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert x == null || std.isString(x);\n if x != null && std.length(x) >= 10 then\n x\n else\n..." diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml new file mode 100644 index 00000000..841f9cc1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml @@ -0,0 +1,38 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert x == null || std.isNumber(x); + if x != null then + ((m1:|))x + else + assert x == null; + ((m2:|))x + + [f(null), f(3)] + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert x == null || std.isNumber(x);\n if x != null then\n x\n else\n..." 
+- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "null" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert x == null || std.isNumber(x);\n if x != null then\n x\n else\n..." diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml new file mode 100644 index 00000000..517edbc7 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml @@ -0,0 +1,26 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert x == null || std.isString(x); + if x != null then + std.length(((m1:|))x) + else + 0; + + f(null) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert x == null || std.isString(x);\n if x != null then\n std.length(x)\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml new file mode 100644 index 00000000..8c169f1f --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml @@ -0,0 +1,50 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isDecimal(x) then + ((m1:|))x + 0.5 + else if std.isInteger(x) then + ((m2:|))x + 1 + else + null; + + local n = f(5); + if n == null then 5 else ((m3:|))n + 2 + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isDecimal(x) then\n x + 0.5\n else if std.isInteger(x) then\n x + 1\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isDecimal(x) then\n x + 0.5\n else if std.isInteger(x) then\n x + 1\n..." 
+- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local n = f(5);" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml new file mode 100644 index 00000000..3c71b9fc --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml @@ -0,0 +1,37 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert std.isNumber(x) || std.isString(x); + if std.isNumber(x) then + ((m1:|))x + 1 + else + std.length(((m2:|))x); + + f(3) + f("hi") + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isNumber(x) || std.isString(x);\n if std.isNumber(x) then\n x + 1\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isNumber(x) || std.isString(x);\n if std.isNumber(x) then\n x + 1\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml new file mode 100644 index 00000000..f41f8ec5 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml @@ -0,0 +1,24 @@ +steps: +- step: create + files: + main.jsonnet: | + local wrap(f) = + assert std.isFunction(f); + assert std.length(f) == 2; + ((m1:|))f + + wrap(function(a, b) a + b) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "function(arg0: any, arg1: any)" + - preview: + language: "jsonnet" + value: "local wrap(f) =\n assert std.isFunction(f);\n assert std.length(f) == 2;\n f\n\n..." diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml new file mode 100644 index 00000000..dfce2f37 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml @@ -0,0 +1,23 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(xs) = + assert std.isArray(xs) && std.length(xs) == 3; + ((m1:|))xs + + f([1, 2, 3]) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "[any, any, any]" + - preview: + language: "jsonnet" + value: "local f(xs) =\n assert std.isArray(xs) && std.length(xs) == 3;\n xs\n\nf" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml new 
file mode 100644 index 00000000..11316106 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml @@ -0,0 +1,48 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x, y) = y + 1; + + if std.length(f) == 1 then + ((m1:|))f + else if std.length(f) == 3 then + ((m2:|))f + else if std.length(f) == 2 then + ((m3:|))f + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local f(x, y) = y + 1;" +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local f(x, y) = y + 1;" +- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "(x: any, y: any) -> number" + - preview: + language: "jsonnet" + value: "local f(x, y) = y + 1;" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml new file mode 100644 index 00000000..5c2b3f31 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml @@ -0,0 +1,48 @@ +steps: +- step: create + files: + main.jsonnet: | + local x = { a: 1, b: "hi" }; + + if std.length(x) == 1 then + ((m1:|))x + else if std.length(x) == 3 then + ((m2:|))x + else if std.length(x) == 2 then + x.((m3:|))a + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local x = { a: 1, b: \"hi\" };" +- step: requestHover + as: 
hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local x = { a: 1, b: \"hi\" };" +- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "x.a" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml new file mode 100644 index 00000000..27f1d1c8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml @@ -0,0 +1,39 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isFunction(x) then + if std.length(x) == 2 then + ((m1:|))x(3, 5) + else + ((m2:|))x + else + x; + + f(function(a, b) a + b) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "function(arg0: any, arg1: any)" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isFunction(x) then\n if std.length(x) == 2 then\n x(3, 5)\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "function()" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isFunction(x) then\n if std.length(x) == 2 then\n x(3, 5)\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml new file mode 100644 index 00000000..7a3d9273 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml @@ -0,0 +1,39 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isFunction(x) then + if std.length(x) == 2 then + ((m1:|))x + else + ((m2:|))x + else + null; + + f(function(a, b) a + b) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "function(arg0: any, arg1: any)" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isFunction(x) then\n if std.length(x) == 2 then\n x\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "function()" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isFunction(x) then\n if std.length(x) == 2 then\n x\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml new file mode 100644 index 00000000..ba4ee5fa --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml @@ -0,0 +1,71 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) then + if "a" in x && std.isString(x.a) then + if "b" in x && std.isNumber(x.b) then + if std.length(x) == 2 then + std.length(x.a) + x.((m1:|))b + else if std.length(x) == 1 then + ((m2:|))x + else if std.length(x) == 3 then + x.((m3:|))b + else + x.((m4:|))b + else + 0 + else + 0 + else + 0; + + f({ a: "hello", b: 4 }) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "x.b" +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isObject(x) then\n if \"a\" in x && std.isString(x.a) then\n if \"b\" in x && std.isNumber(x.b) then\n if std.length(x) == 2 then\n..." 
+- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "x.b" +- step: requestHover + as: hover4 + file: main.jsonnet + at: m4 +- step: expectHover + request: hover4 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "x.b" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml new file mode 100644 index 00000000..b60fb55b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x && "bar" in x) then + x.((m1:|))foo + else + null; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "any" + - preview: + language: "jsonnet" + value: "x.foo" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml new file mode 100644 index 00000000..826bce73 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then + x.((m1:|))foo + else + null; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: 
"never" + - preview: + language: "jsonnet" + value: "x.foo" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml new file mode 100644 index 00000000..86824a50 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then + ((m1:|))x + else + null; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "{ bar: never, foo: never, ... }" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isObject(x) && \"foo\" in x && !(\"foo\" in x || \"bar\" in x) then\n x\n else\n null;" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml new file mode 100644 index 00000000..bbbec979 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x || std.length(x) == 5) then + x.((m1:|))foo + else + null; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "x.foo" diff --git 
a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml new file mode 100644 index 00000000..9568a064 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml @@ -0,0 +1,30 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert std.isObject(x) && std.isString(x.t); + if x.t == "foo" then + 1 + else if x.t == "bar" then + 2 + else if x.t == "quz" then + 3 + else + std.length(x.((m1:|))t) + + f({ t: "abc" }) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "x.t" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml new file mode 100644 index 00000000..489d9b71 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml @@ -0,0 +1,26 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(obj) = + assert std.isObject(obj); + if "foo" in obj then + obj.((m1:|))foo + else + 0; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "any" + - preview: + language: "jsonnet" + value: "obj.foo" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml new file mode 100644 index 00000000..8288017c --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml @@ -0,0 +1,41 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(obj) = + assert std.isObject(obj); + if "a" in obj then + if std.isNumber(obj.a) then + obj.((m1:|))a + 7 + else + assert !std.isBoolean(obj.a) && obj.a != null; + std.length(obj.((m2:|))a) + else + std.length(obj); + + [f({ b: null }), f({ a: "hello" }), f({ a: 4 })] + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "obj.a" +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "string | function() | object | array" + - preview: + language: "jsonnet" + value: "obj.a" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml new file mode 100644 index 00000000..bf3f434c --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml @@ -0,0 +1,26 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(b) = + local obj = if b then { foo: 3 } else {}; + if "foo" in obj then + obj.((m1:|))foo + else + 4; + + [f(true), f(false)] + +- step: diagnosticsSettled + +- step: requestHover + as: hover1 + file: main.jsonnet + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "obj.foo" diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml new file mode 100644 index 00000000..ed139f4e --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml @@ -0,0 +1,17 @@ +steps: +- step: create + files: + main.jsonnet: local ((m1:|))arr = [1, 2, 3]; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + - type: '[number, number, number]' + - preview: + language: jsonnet + value: |- + local arr = [1, 2, 3]; diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml new file mode 100644 index 00000000..6c02b040 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml @@ -0,0 +1,17 @@ +steps: +- step: create + files: + main.jsonnet: local b = ((m1:|))true; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + - type: 'true' + - docs: Boolean literal `true`. 
+ - docs: + language: jsonnet + value: "true" diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml new file mode 100644 index 00000000..6dc140a1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml @@ -0,0 +1,19 @@ +steps: +- step: create + files: + main.jsonnet: | + local ((m1:|))f(x) = x; + f + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + - type: 'function(x: any)' + - preview: + language: jsonnet + value: |- + local f(x) = x; diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml new file mode 100644 index 00000000..f79644db --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml @@ -0,0 +1,35 @@ +steps: +- step: create + files: + main.libsonnet: | + local module = import './module/main.libsonnet'; + + { + build(input):: [ + module.n((m1:|))ew(input), + ], + } + module/main.libsonnet: | + local helper = import './helper.libsonnet'; + + { + new(input):: { + value: input, + }, + } + module/helper.libsonnet: | + import 1 + +- step: requestHover + as: hover_new + file: main.libsonnet + at: m1 +- step: expectHover + request: hover_new + result: + - type: '(input: any) -> { value: any }' + - context: '`new` from `./module/main.libsonnet`' + - preview: + language: jsonnet + value: |- + module.new diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml new file mode 100644 index 00000000..a78371b9 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml @@ -0,0 +1,50 @@ +steps: +- step: create + files: + 
main.jsonnet: | + local top = { + a: 1, + b: 2, + }; + + { + topUse: ((topUse:|))top, + local objLocal = { + c: 3, + d: 4, + }, + + objUse: ((objUse:|))objLocal, + } + +- step: requestHover + as: topHover + file: main.jsonnet + at: topUse +- step: expectHover + request: topHover + result: + - type: '{ a: number, b: number }' + - preview: + language: jsonnet + value: |- + local top = { + a: 1, + b: 2, + }; + +- step: requestHover + as: objHover + file: main.jsonnet + at: objUse +- step: expectHover + request: objHover + result: + - type: '{ c: number, d: number }' + - preview: + language: jsonnet + value: |- + local objLocal = { + c: 3, + d: 4, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml new file mode 100644 index 00000000..2cc5135d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml @@ -0,0 +1,17 @@ +steps: +- step: create + files: + main.jsonnet: local n = ((m1:|))null; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + - type: 'null' + - docs: Literal `null` value. 
+ - docs: + language: jsonnet + value: "null" diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml new file mode 100644 index 00000000..46faf4c8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml @@ -0,0 +1,17 @@ +steps: +- step: create + files: + main.jsonnet: local x = ((m1:|))42; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + - type: number + - preview: + language: jsonnet + value: |- + 42 diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml new file mode 100644 index 00000000..f5ba2884 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml @@ -0,0 +1,17 @@ +steps: +- step: create + files: + main.jsonnet: "local ((m1:|))obj = { a: 1 };" + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + - type: '{ a: number }' + - preview: + language: jsonnet + value: |- + local obj = { a: 1 }; diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml new file mode 100644 index 00000000..fb428ceb --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml @@ -0,0 +1,17 @@ +steps: +- step: create + files: + main.jsonnet: ((m1:|))std.length + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + - type: object + - preview: + language: jsonnet + value: |- + std.length diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml new file mode 100644 index 00000000..c2164e58 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml @@ -0,0 +1,17 @@ +steps: +- step: create + 
files: + main.jsonnet: 'local s = ((m1:|))"hello";' + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + - type: string + - preview: + language: jsonnet + value: |- + "hello" diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/for_comprehension_binding.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/for_comprehension_binding.yaml new file mode 100644 index 00000000..84f36ad8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/for_comprehension_binding.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + main.jsonnet: "[((m2:|))x for ((m1:|((m3:|))))x in [1,2,3]]" + +- step: requestReferences + as: xRefs + file: main.jsonnet + at: m1 + include_declaration: true + +- step: expectReferences + request: xRefs + result: + - file: main.jsonnet + at: m2 + text: x + - file: main.jsonnet + at: m3 + text: x diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/function_param_usage.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/function_param_usage.yaml new file mode 100644 index 00000000..069f2690 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/function_param_usage.yaml @@ -0,0 +1,39 @@ +steps: +- step: create + files: + main.jsonnet: | + local add(((m1:|x)), ((m4:|y))) = + ((m3:|x)) + ((m6:|y)); + add(1, 2) + +- step: requestReferences + as: refsX + file: main.jsonnet + at: m1 + include_declaration: true + +- step: expectReferences + request: refsX + result: + - file: main.jsonnet + at: m1 + text: x + - file: main.jsonnet + at: m3 + text: x + +- step: requestReferences + as: refsY + file: main.jsonnet + at: m4 + include_declaration: true + +- step: expectReferences + request: refsY + result: + - file: main.jsonnet + at: m4 + text: y + - file: main.jsonnet + at: m6 + text: y diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/function_parameters.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/function_parameters.yaml new file mode 
100644 index 00000000..524da96d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/function_parameters.yaml @@ -0,0 +1,36 @@ +steps: +- step: create + files: + main.jsonnet: local f(((m1:|a)), ((m4:|b)))((m6:|)) = ((m3:|a)) + ((m5:|b)); + +- step: requestReferences + as: refsA + file: main.jsonnet + at: m1 + include_declaration: true + +- step: expectReferences + request: refsA + result: + - file: main.jsonnet + at: m1 + text: a + - file: main.jsonnet + at: m3 + text: a + +- step: requestReferences + as: refsB + file: main.jsonnet + at: m4 + include_declaration: true + +- step: expectReferences + request: refsB + result: + - file: main.jsonnet + at: m4 + text: b + - file: main.jsonnet + at: m5 + text: b diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/local_variable_definition.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/local_variable_definition.yaml new file mode 100644 index 00000000..6e66f131 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/local_variable_definition.yaml @@ -0,0 +1,22 @@ +steps: +- step: create + files: + main.jsonnet: | + local [[decl:x]] = 1; + [[use:x]] + 1 + +- step: requestReferences + as: refs + file: main.jsonnet + at: decl + include_declaration: true + +- step: expectReferences + request: refs + result: + - file: main.jsonnet + at: decl + text: x + - file: main.jsonnet + at: use + text: x diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/multiple_usages.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/multiple_usages.yaml new file mode 100644 index 00000000..876f129a --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/multiple_usages.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local [[decl:x]] = 1; + [[use1:x]] + [[use2:x]] + +- step: requestReferences + as: refs + file: main.jsonnet + at: decl + include_declaration: true + +- step: expectReferences + request: refs + result: + - file: main.jsonnet + at: decl + text: x + - 
file: main.jsonnet + at: use1 + text: x + - file: main.jsonnet + at: use2 + text: x diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/nested_local.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/nested_local.yaml new file mode 100644 index 00000000..7d73521e --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/nested_local.yaml @@ -0,0 +1,40 @@ +steps: +- step: create + files: + main.jsonnet: | + local [[outerDecl:outer]] = + local [[innerDecl:inner]] = 1; + [[innerUse:inner]] + 1; + [[outerUse:outer]] + +- step: requestReferences + as: refsOuter + file: main.jsonnet + at: outerDecl + include_declaration: true + +- step: expectReferences + request: refsOuter + result: + - file: main.jsonnet + at: outerDecl + text: outer + - file: main.jsonnet + at: outerUse + text: outer + +- step: requestReferences + as: refsInner + file: main.jsonnet + at: innerDecl + include_declaration: true + +- step: expectReferences + request: refsInner + result: + - file: main.jsonnet + at: innerDecl + text: inner + - file: main.jsonnet + at: innerUse + text: inner diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/object_local.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/object_local.yaml new file mode 100644 index 00000000..232d099d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/object_local.yaml @@ -0,0 +1,24 @@ +steps: +- step: create + files: + main.jsonnet: | + { + local ((m1:|((m2:|))))helper = 42, + value: ((m3:|))helper, + } + +- step: requestReferences + as: helperRefs + file: main.jsonnet + at: m1 + include_declaration: true + +- step: expectReferences + request: helperRefs + result: + - file: main.jsonnet + at: m2 + text: helper + - file: main.jsonnet + at: m3 + text: helper diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/shadowing_different_scopes.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/shadowing_different_scopes.yaml new file mode 100644 index 00000000..b4175467 --- /dev/null +++ 
b/crates/jrsonnet-lsp/tests/scenarios/references/shadowing_different_scopes.yaml @@ -0,0 +1,40 @@ +steps: +- step: create + files: + main.jsonnet: | + local [[outerDecl:x]] = 1; + local f([[innerDecl:x]]) = + [[innerUse:x]]; + [[outerUse:x]] + +- step: requestReferences + as: outerX + file: main.jsonnet + at: outerDecl + include_declaration: true + +- step: expectReferences + request: outerX + result: + - file: main.jsonnet + at: outerDecl + text: x + - file: main.jsonnet + at: outerUse + text: x + +- step: requestReferences + as: innerX + file: main.jsonnet + at: innerDecl + include_declaration: true + +- step: expectReferences + request: innerX + result: + - file: main.jsonnet + at: innerDecl + text: x + - file: main.jsonnet + at: innerUse + text: x diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/close_then_watched_change_then_references.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/close_then_watched_change_then_references.yaml new file mode 100644 index 00000000..f47b8028 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/close_then_watched_change_then_references.yaml @@ -0,0 +1,36 @@ +# Ensure closing one file does not drop reference data for another file, +# and that watched-file changes update dependency graph state. 
+steps: +- step: create + files: + lib.jsonnet: local ((m1:|))target = 1; ((m2:|))target + main.jsonnet: local lib = import "lib.jsonnet"; lib.target + +- step: diagnosticsSettled + +- step: close + file: main.jsonnet + +- step: writeFile + path: main.jsonnet + text: local lib = import "lib.jsonnet"; lib.other + +- step: notifyWatchedFiles + changes: + - path: main.jsonnet + type: changed + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterWatchedChange + file: lib.jsonnet + at: m1 + include_declaration: false + +- step: expectReferences + request: refsAfterWatchedChange + result: + - file: lib.jsonnet + at: m2 + text: target diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml new file mode 100644 index 00000000..2fc58324 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml @@ -0,0 +1,30 @@ +# Verify Evaluate code lens is emitted with a stable execute payload. 
+steps: +- step: create + files: + main.jsonnet: ((fileStart:|))local x = 1; x + +- step: diagnosticsSettled + +- step: requestCodeLens + as: lenses + file: main.jsonnet + +- step: requestExecuteCodeLens + as: evaluateViaCodeLens + request: lenses + index: 0 + +- step: expectCodeLens + request: lenses + result: + - range: fileStart + command: + title: Evaluate + command: jrsonnet.evalFile + arguments: + - file: main.jsonnet + +- step: expectExecuteCodeLens + request: evaluateViaCodeLens + result: 1 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml new file mode 100644 index 00000000..3c741e43 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml @@ -0,0 +1,36 @@ +# Verify completion through bracket object access resolves the nested object type: +# `hm["foo"].` should return exactly the fields `a`, `b`, and `c`. +steps: +- step: create + files: + main.jsonnet: | + local hm = { + foo: { + a: true, + b: 4, + c: "hi", + }, + }; + + hm["foo"].((m1:|)) + +- step: diagnosticsSettled + +- step: requestCompletion + as: bracketCompletion + file: main.jsonnet + at: m1 +- step: expectCompletion + request: bracketCompletion + result: + isIncomplete: false + items: + - label: a + kind: 5 + detail: "true" + - label: b + kind: 5 + detail: number + - label: c + kind: 5 + detail: string diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml new file mode 100644 index 00000000..52757e16 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml @@ -0,0 +1,25 @@ +# Verify deep object-path completion returns only nested fields at the cursor. 
+steps: +- step: create + files: + main.jsonnet: | + local obj = { inner: { x: 1, y: 2 } }; + obj.inner.((m1:|)) + +- step: diagnosticsSettled + +- step: requestCompletion + as: nestedFields + file: main.jsonnet + at: m1 +- step: expectCompletion + request: nestedFields + result: + isIncomplete: false + items: + - label: x + kind: 5 + detail: number + - label: y + kind: 5 + detail: number diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_non_identifier_field_text_edit.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_non_identifier_field_text_edit.yaml new file mode 100644 index 00000000..fbf3cca1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_non_identifier_field_text_edit.yaml @@ -0,0 +1,35 @@ +# Verify completion for non-identifier object fields uses bracket rewrite text edits. +# `obj.` should include: +# - `"my-field"` with a text edit rewriting `.` +# - `normal` as a plain field completion. +steps: +- step: create + files: + main.jsonnet: | + local obj = { + "my-field": 1, + normal: 2, + }; + obj[[dotBeforeCompletion:.]]((m1:|)) + +- step: diagnosticsSettled + +- step: requestCompletion + as: objectFieldCompletion + file: main.jsonnet + at: m1 +- step: expectCompletion + request: objectFieldCompletion + result: + isIncomplete: false + items: + - label: my-field + kind: 5 + detail: number + textEdit: + range: + rangeOf: dotBeforeCompletion + newText: "[\"my-field\"]" + - label: normal + kind: 5 + detail: number diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml new file mode 100644 index 00000000..27f450e3 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml @@ -0,0 +1,39 @@ +# Ensure completion remains useful when the current expression is syntactically +# incomplete: in-scope locals and object keywords should still be offered. 
+steps: +- step: create + files: + main.jsonnet: | + local x = 1; + local y = 2; + { foo: x +((m1:|)) + +- step: diagnosticsSettled + +- step: requestCompletion + as: completionAtError + file: main.jsonnet + at: m1 +- step: expectCompletion + request: completionAtError + result: + isIncomplete: false + items: + - label: x + kind: 6 + detail: local variable + - label: y + kind: 6 + detail: local variable + - label: std + kind: 9 + detail: Jsonnet standard library + - label: $ + kind: 14 + detail: Reference to root object + - label: self + kind: 14 + detail: Reference to current object + - label: super + kind: 14 + detail: Reference to inherited object diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml new file mode 100644 index 00000000..0cd15c61 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml @@ -0,0 +1,32 @@ +# Verify nested union object construction keeps member completion precise: +# `obj.a.` should offer exactly the shared field `b`. 
+steps: +- step: create + files: + main.jsonnet: | + local f(x, y, z) = + assert std.isBoolean(x) && std.isBoolean(y) && std.isBoolean(z); + local obj = { + a: + if x then + { b: if y then 1 else false } + else + { b: if z then true else "hi" }, + }; + obj.a.((m1:|)) + f(true, false, true) + +- step: diagnosticsSettled + +- step: requestCompletion + as: unionFieldCompletion + file: main.jsonnet + at: m1 +- step: expectCompletion + request: unionFieldCompletion + result: + isIncomplete: false + items: + - label: b + kind: 5 + detail: "true | false | number | string" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/config_change_code_actions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/config_change_code_actions.yaml new file mode 100644 index 00000000..90ea2503 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/config_change_code_actions.yaml @@ -0,0 +1,110 @@ +# Verify config updates take effect immediately for code-action policy: +# default policy offers import-removal variants, restricted policy does not. 
+steps: +- step: create + files: + scenario-code-action-policy.jsonnet: ((rangeStart:|))local ((x:|x)) = import "foo.libsonnet"; 42 + +- step: diagnosticsSettled + +- step: requestCodeAction + as: defaultPolicy + file: scenario-code-action-policy.jsonnet + at: rangeStart + text: local x = import "foo.libsonnet"; 42 + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: defaultPolicy + result: + - title: Prefix `x` with `_` + kind: quickfix + isPreferred: true + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-code-action-policy.jsonnet: + - at: x + text: x + replace: _x + + - title: Remove unused binding `x` + kind: quickfix + isPreferred: false + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-code-action-policy.jsonnet: + - at: rangeStart + len: 32 + replace: "" + + - title: Remove all unused bindings + kind: source.fixAll + isPreferred: false + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-code-action-policy.jsonnet: + - at: rangeStart + len: 32 + replace: "" + +- step: config + settings: + jsonnet: + codeActions: + removeUnused: nonImportBindings + +- step: requestCodeAction + as: nonImportBindingsPolicy + file: scenario-code-action-policy.jsonnet + at: rangeStart + text: local x = import "foo.libsonnet"; 42 + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: nonImportBindingsPolicy + result: + - title: Prefix `x` with `_` + kind: quickfix + isPreferred: true + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + 
source: jrsonnet-lint + message: unused variable + edits: + scenario-code-action-policy.jsonnet: + - at: x + text: x + replace: _x diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml new file mode 100644 index 00000000..755d3773 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml @@ -0,0 +1,103 @@ +# Exercise cross-file rename end-to-end, then validate navigation APIs on +# the updated text state after edits are applied. +steps: +- step: create + files: + lib.jsonnet: "{ ((libField:|helper)): function(x) x * 2 }" + main.jsonnet: "local lib = import \"lib.jsonnet\"; lib.((mainField:|helper))(2) + lib.((mainField2:|helper))(3)" + +- step: diagnosticsSettled + +- step: requestDefinition + as: defBefore + file: main.jsonnet + at: mainField +- step: expectDefinition + request: defBefore + result: + file: lib.jsonnet + at: libField + text: helper + +- step: requestRename + as: renameHelper + file: lib.jsonnet + at: libField + new_name: util + +- step: expectRename + request: renameHelper + result: + edits: + lib.jsonnet: + - at: libField + text: helper + replace: util + main.jsonnet: + - at: mainField + text: helper + replace: util + - at: mainField2 + text: helper + replace: util + +- step: changeFull + file: lib.jsonnet + text: "{ ((libFieldAfter:|util)): function(x) x * 2 }" + version: 2 + +- step: changeFull + file: main.jsonnet + text: "local lib = import \"lib.jsonnet\"; lib.((mainFieldAfter:|util))(2) + lib.((mainFieldAfter2:|util))(3)" + version: 2 + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfter + file: lib.jsonnet + at: libFieldAfter + include_declaration: false + +- step: expectReferences + request: refsAfter + result: + - file: main.jsonnet + at: mainFieldAfter + text: util + - file: main.jsonnet + at: mainFieldAfter2 + text: util + +- step: 
requestDefinition + as: defAfter + file: main.jsonnet + at: mainFieldAfter +- step: expectDefinition + request: defAfter + result: + file: lib.jsonnet + at: libFieldAfter + text: util + +- step: requestDeclaration + as: declAfter + file: main.jsonnet + at: mainFieldAfter +- step: expectDeclaration + request: declAfter + result: + file: lib.jsonnet + at: libFieldAfter + text: util + +- step: requestTypeDefinition + as: typeDefAfter + file: main.jsonnet + at: mainFieldAfter2 +- step: expectTypeDefinition + request: typeDefAfter + result: + file: lib.jsonnet + at: libFieldAfter + text: util diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_references_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_references_roundtrip.yaml new file mode 100644 index 00000000..fa0d8949 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_references_roundtrip.yaml @@ -0,0 +1,70 @@ +# Verify cross-file rename of an exported local keeps references and navigation +# coherent after applying edits to both source and importer files. 
+steps: +- step: create + files: + lib.jsonnet: local ((libDecl:|target)) = 1; ((libUse:|target)) + main.jsonnet: local lib = import "lib.jsonnet"; lib.((mainUse:|target)) + +- step: diagnosticsSettled + +- step: requestRename + as: renameTarget + file: lib.jsonnet + at: libDecl + new_name: renamed + +- step: expectRename + request: renameTarget + result: + edits: + lib.jsonnet: + - at: libDecl + text: target + replace: renamed + - at: libUse + text: target + replace: renamed + main.jsonnet: + - at: mainUse + text: target + replace: renamed + +- step: changeFull + file: lib.jsonnet + text: local ((libDeclAfter:|renamed)) = 1; ((libUseAfter:|renamed)) + version: 2 + +- step: changeFull + file: main.jsonnet + text: local lib = import "lib.jsonnet"; lib.((mainUseAfter:|renamed)) + version: 2 + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterRename + file: lib.jsonnet + at: libDeclAfter + include_declaration: false + +- step: expectReferences + request: refsAfterRename + result: + - file: lib.jsonnet + at: libUseAfter + text: renamed + - file: main.jsonnet + at: mainUseAfter + text: renamed + +- step: requestDefinition + as: defAfterRename + file: main.jsonnet + at: mainUseAfter +- step: expectDefinition + request: defAfterRename + result: + file: lib.jsonnet + at: libDeclAfter + text: renamed diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/custom_eval_file_request.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/custom_eval_file_request.yaml new file mode 100644 index 00000000..022ade64 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/custom_eval_file_request.yaml @@ -0,0 +1,24 @@ +# Verify custom request execution for eval file is covered independently. +# Use a multi-file input so the request is exercised on a non-trivial program. 
+steps: + - step: create + files: + lib.libsonnet: | + function(v) v + 2 + main.jsonnet: | + local plus = import "./lib.libsonnet"; + plus(30) + + - step: diagnosticsSettled + + - step: requestCustom + as: evaluateViaCustom + method: jrsonnet/evalFile + params: + textDocument: + uri: + file: main.jsonnet + + - step: expectCustom + request: evaluateViaCustom + result: 32 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_eval_reports_source_span.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_eval_reports_source_span.yaml new file mode 100644 index 00000000..a57ad5fb --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_eval_reports_source_span.yaml @@ -0,0 +1,22 @@ +# Verify eval diagnostics map to a concrete in-file span instead of 0:0. +steps: +- step: create + files: + main.jsonnet: ((m1:|))undefined_var + +- step: config + settings: + jsonnet: + enableEvalDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: undefined_var + severity: error + code: eval-error + source: jrsonnet-eval + message: "local is not defined: undefined_var" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_available_list.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_available_list.yaml new file mode 100644 index 00000000..49e92a5d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_available_list.yaml @@ -0,0 +1,25 @@ +# Verify type-error diagnostics fall back to listing available fields when no +# close suggestion exists. 
+steps: +- step: create + files: + main.jsonnet: | + local dims = { length: 1, width: 2 }; + ((m1:|))dims.xyz + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: dims.xyz + severity: warning + code: type-error + source: jrsonnet-lint + message: "no such field `xyz`; available fields: length, width" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_with_suggestion.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_with_suggestion.yaml new file mode 100644 index 00000000..dbb44374 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_with_suggestion.yaml @@ -0,0 +1,25 @@ +# Verify type-error diagnostics include did-you-mean suggestions for close field typos. +# `dims.lenght` should suggest `length`. +steps: +- step: create + files: + main.jsonnet: | + local dims = { length: 1, width: 2 }; + ((m1:|))dims.lenght + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: dims.lenght + severity: warning + code: type-error + source: jrsonnet-lint + message: "no such field `lenght`; did you mean `length`?" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_save_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_save_roundtrip.yaml new file mode 100644 index 00000000..96c46fe5 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_save_roundtrip.yaml @@ -0,0 +1,38 @@ +# Verify diagnostics clear after a full-document edit plus save cycle. +steps: +- step: create + files: + main.jsonnet: local ((m1:|))unused = 1; 42 + + # Lint diagnostics are opt-in; enable them so unused-variable appears. 
+- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: "unused variable: `unused`; prefix with `_` to silence this warning" + +- step: changeFull + file: main.jsonnet + text: "42" + version: 2 + +- step: save + file: main.jsonnet + text: + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_coverage_matrix.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_coverage_matrix.yaml new file mode 100644 index 00000000..a21c652d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_coverage_matrix.yaml @@ -0,0 +1,48 @@ +# Type diagnostics should cover arity, named-argument, and field-access mistakes. +steps: +- step: create + files: + wrong_arg_count.jsonnet: ((mArgCount:|))std.length() + wrong_named_arg.jsonnet: | + local add(x, y) = x + y; + add(((mNamedArg:|))z = 1) + unknown_field_access.jsonnet: | + local obj = { known: 1 }; + ((mUnknownField:|))obj.missing + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: wrong_arg_count.jsonnet + diagnostics: + - at: mArgCount + text: std.length() + severity: warning + code: type-error + source: jrsonnet-lint + message: "`std.length` requires at least 1 argument(s), but 0 provided" + +- step: expectDiagnostics + file: wrong_named_arg.jsonnet + diagnostics: + - at: mNamedArg + text: z = 1 + severity: warning + code: type-error + source: jrsonnet-lint + message: "`add` has no parameter named `z`; expected one of: `x`, `y`" + +- step: expectDiagnostics + file: unknown_field_access.jsonnet + diagnostics: + - at: mUnknownField + text: obj.missing + severity: warning + code: type-error 
+ source: jrsonnet-lint + message: "no such field `missing`; available fields: known" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_mismatch_dedup_eval.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_mismatch_dedup_eval.yaml new file mode 100644 index 00000000..f7bf9326 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_mismatch_dedup_eval.yaml @@ -0,0 +1,24 @@ +# Verify that enabling both lint + eval does not emit duplicate diagnostics for +# one underlying type mismatch. +steps: +- step: create + files: + main.jsonnet: std.length(((m1:|))1) + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + enableEvalDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: "1" + severity: warning + code: type-error + source: jrsonnet-lint + message: "`std.length` argument 1 (`x`) expects `string | object | function() | array`, got `number`" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/document_symbol_local_and_object.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/document_symbol_local_and_object.yaml new file mode 100644 index 00000000..e1dfbe51 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/document_symbol_local_and_object.yaml @@ -0,0 +1,30 @@ +# Verify `textDocument/documentSymbol` returns both: +# - a local binding symbol for `x` +# - an object field symbol for `result` +# and that each symbol carries the expected kind and exact ranges. 
+steps: +- step: create + files: + main.jsonnet: | + local [[localBinding:x = 1]]; + { [[resultField:result: x]] } + +- step: diagnosticsSettled + +- step: requestDocumentSymbol + as: docSymbols + file: main.jsonnet + +- step: expectDocumentSymbol + request: docSymbols + result: + - name: x + kind: 13 + range: {rangeOf: localBinding} + selectionRange: {rangeOf: localBinding} + children: + - name: result + kind: 8 + range: {rangeOf: resultField} + selectionRange: {rangeOf: resultField} + children: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/fix_all_unused_multifile_comments_preserved.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/fix_all_unused_multifile_comments_preserved.yaml new file mode 100644 index 00000000..a5c12a81 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/fix_all_unused_multifile_comments_preserved.yaml @@ -0,0 +1,81 @@ +# Validate source.fixAll for unused bindings in multiple files while keeping +# surrounding comments intact by applying precise edit ranges only. 
+steps: +- step: create + files: + main.jsonnet: | + ((m1:|))// main heading + ((m4:|))local ((m2:|((m3:|))))main_unused = import "lib.jsonnet"; + 42 + lib.jsonnet: | + ((m5:|))// lib heading + ((m8:|))local ((m6:|((m7:|))))lib_unused = 1; + 7 + +- step: diagnosticsSettled + +- step: requestCodeAction + as: fixMain + file: main.jsonnet + at: m1 + len: 1 + only: [source.fixAll] + diagnostics: + - at: m2 + text: main_unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: fixMain + result: + - title: Remove all unused bindings + kind: source.fixAll + isPreferred: false + diagnostics: + - at: m3 + text: main_unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + main.jsonnet: + - at: m4 + len: 40 + replace: "" + +- step: requestCodeAction + as: fixLib + file: lib.jsonnet + at: m5 + len: 1 + only: [source.fixAll] + diagnostics: + - at: m6 + text: lib_unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: fixLib + result: + - title: Remove all unused bindings + kind: source.fixAll + isPreferred: false + diagnostics: + - at: m7 + text: lib_unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + lib.jsonnet: + - at: m8 + len: 20 + replace: "" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_config_updates_apply.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_config_updates_apply.yaml new file mode 100644 index 00000000..108b4989 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_config_updates_apply.yaml @@ -0,0 +1,76 @@ +# Verify formatting configuration updates affect subsequent textDocument/formatting requests. 
+steps: +- step: create + files: + string.jsonnet: "{a:'x'}" + empty.jsonnet: "{}" + +- step: diagnosticsSettled + +- step: requestFormatting + as: defaultStringFormatting + file: string.jsonnet + +- step: expectFormatting + request: defaultStringFormatting + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 7 + newText: | + { + a: 'x', + } + +- step: config + settings: + jsonnet: + formatting: + indent: 2 + string_style: double + +- step: requestFormatting + as: configuredStringFormatting + file: string.jsonnet + +- step: expectFormatting + request: configuredStringFormatting + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 7 + newText: | + { + a: "x", + } + +- step: config + settings: + jsonnet: + formatting: + pad_objects: false + +- step: requestFormatting + as: configuredEmptyFormatting + file: empty.jsonnet + +- step: expectFormatting + request: configuredEmptyFormatting + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 2 + newText: | + {} diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_default_returns_full_edit.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_default_returns_full_edit.yaml new file mode 100644 index 00000000..60da340b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_default_returns_full_edit.yaml @@ -0,0 +1,26 @@ +# Verify textDocument/formatting returns a full-document replacement edit by default. 
+steps: +- step: create + files: + main.jsonnet: "{a:1}" + +- step: diagnosticsSettled + +- step: requestFormatting + as: defaultFormatting + file: main.jsonnet + +- step: expectFormatting + request: defaultFormatting + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 5 + newText: | + { + a: 1, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_outside_requested_range_noop.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_outside_requested_range_noop.yaml new file mode 100644 index 00000000..4087b60c --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_outside_requested_range_noop.yaml @@ -0,0 +1,21 @@ +# Verify textDocument/rangeFormatting returns no edits when formatter changes +# would touch content outside the requested range. +steps: +- step: create + files: + main.jsonnet: | + { + [[target:a: 1,]] + b:2, + } + +- step: diagnosticsSettled + +- step: requestRangeFormatting + as: formattingRange + file: main.jsonnet + range: target + +- step: expectRangeFormatting + request: formattingRange + result: [] diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_request_options_apply.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_request_options_apply.yaml new file mode 100644 index 00000000..72667799 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_request_options_apply.yaml @@ -0,0 +1,27 @@ +# Verify range formatting honors request indentation options. 
+steps: +- step: create + files: + main.jsonnet: "((wholeStart:|)){a:{b:1}}" + +- step: diagnosticsSettled + +- step: requestRangeFormatting + as: formattingRange + file: main.jsonnet + at: wholeStart + len: 9 + insert_spaces: false + tab_size: 8 + +- step: expectRangeFormatting + request: formattingRange + result: + - range: + start: + line: 0 + character: 1 + end: + line: 0 + character: 9 + newText: "\n\ta: {\n\t\tb: 1,\n\t},\n}\n" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_within_requested_range.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_within_requested_range.yaml new file mode 100644 index 00000000..1f01aae8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_within_requested_range.yaml @@ -0,0 +1,29 @@ +# Verify textDocument/rangeFormatting returns edits when formatter changes +# are contained in the requested range. +steps: +- step: create + files: + main.jsonnet: | + { + a: 1, + [[target:b:2,]] + } + +- step: diagnosticsSettled + +- step: requestRangeFormatting + as: formattingRange + file: main.jsonnet + range: target + +- step: expectRangeFormatting + request: formattingRange + result: + - range: + start: + line: 2 + character: 4 + end: + line: 2 + character: 4 + newText: " " diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_option_combinations.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_option_combinations.yaml new file mode 100644 index 00000000..bd4de4ec --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_option_combinations.yaml @@ -0,0 +1,70 @@ +# Verify request-level formatting option combinations for LSP formatting. 
+steps: +- step: create + files: + main.jsonnet: "{a:1}" + +- step: diagnosticsSettled + +- step: requestFormatting + as: trimFinalOnly + file: main.jsonnet + trim_final_newlines: true + +- step: expectFormatting + request: trimFinalOnly + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 5 + newText: |- + { + a: 1, + } + +- step: requestFormatting + as: trimThenInsertFinal + file: main.jsonnet + trim_trailing_whitespace: true + trim_final_newlines: true + insert_final_newline: true + +- step: expectFormatting + request: trimThenInsertFinal + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 5 + newText: | + { + a: 1, + } + +- step: requestFormatting + as: trimAndSuppressFinal + file: main.jsonnet + trim_trailing_whitespace: true + insert_final_newline: false + +- step: expectFormatting + request: trimAndSuppressFinal + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 5 + newText: |- + { + a: 1, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_tabs_spaces_tabsize.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_tabs_spaces_tabsize.yaml new file mode 100644 index 00000000..45ef1061 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_tabs_spaces_tabsize.yaml @@ -0,0 +1,72 @@ +# Verify formatting indentation is controlled by request options: +# insert_spaces + tab_size. 
+steps: +- step: create + files: + main.jsonnet: "{a:{b:1}}" + +- step: diagnosticsSettled + +- step: requestFormatting + as: tabs + file: main.jsonnet + insert_spaces: false + tab_size: 8 + +- step: expectFormatting + request: tabs + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 9 + newText: "{\n\ta: {\n\t\tb: 1,\n\t},\n}\n" + +- step: requestFormatting + as: spaces2 + file: main.jsonnet + insert_spaces: true + tab_size: 2 + +- step: expectFormatting + request: spaces2 + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 9 + newText: | + { + a: { + b: 1, + }, + } + +- step: requestFormatting + as: spaces4 + file: main.jsonnet + insert_spaces: true + tab_size: 4 + +- step: expectFormatting + request: spaces4 + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 9 + newText: | + { + a: { + b: 1, + }, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_trailing_newline_input_options.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_trailing_newline_input_options.yaml new file mode 100644 index 00000000..42c822df --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_trailing_newline_input_options.yaml @@ -0,0 +1,69 @@ +# Verify formatting request newline options when the source file has +# trailing newlines. 
+steps: +- step: create + files: + main.jsonnet: "{a:1}\n\n" + +- step: diagnosticsSettled + +- step: requestFormatting + as: trimFinal + file: main.jsonnet + trim_final_newlines: true + +- step: expectFormatting + request: trimFinal + result: + - range: + start: + line: 0 + character: 0 + end: + line: 2 + character: 0 + newText: |- + { + a: 1, + } + +- step: requestFormatting + as: keepFinal + file: main.jsonnet + trim_final_newlines: false + +- step: expectFormatting + request: keepFinal + result: + - range: + start: + line: 0 + character: 0 + end: + line: 2 + character: 0 + newText: | + { + a: 1, + } + +- step: requestFormatting + as: trimThenInsertOne + file: main.jsonnet + trim_final_newlines: true + insert_final_newline: true + +- step: expectFormatting + request: trimThenInsertOne + result: + - range: + start: + line: 0 + character: 0 + end: + line: 2 + character: 0 + newText: | + { + a: 1, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml new file mode 100644 index 00000000..383a9de8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml @@ -0,0 +1,29 @@ +# Verify hover reflects field precision through a string-literal bracket lookup: +# `hm["foo"].a` should preserve the literal `true` type for `a`. 
+steps: +- step: create + files: + main.jsonnet: | + local hm = { + foo: { + a: true, + b: 4, + c: "hi", + }, + }; + hm["foo"].((m1:|))a + +- step: diagnosticsSettled + +- step: requestHover + as: bracketFieldHover + file: main.jsonnet + at: m1 +- step: expectHover + request: bracketFieldHover + result: + - type: "true" + - preview: + language: jsonnet + value: |- + hm["foo"].a diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml new file mode 100644 index 00000000..638ca6d4 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml @@ -0,0 +1,26 @@ +# Verify hover preserves CJK object key names across an import boundary. +# The imported value should still expose the `"水"` field in the rendered type. +steps: +- step: create + files: + a.jsonnet: | + { "水": 1 + 2 } + b.jsonnet: | + local imported = import "a.jsonnet"; + ((m1:|))imported["水"] + +- step: diagnosticsSettled + +- step: requestHover + as: importedTypeHasCjkField + file: b.jsonnet + at: m1 +- step: expectHover + request: importedTypeHasCjkField + result: + - type: '{ 水: number }' + - context: '`a.jsonnet`' + - preview: + language: jsonnet + value: |- + local imported = import "a.jsonnet"; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml new file mode 100644 index 00000000..cdcbaa8b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml @@ -0,0 +1,21 @@ +# Hover on structural braces should not emit object-level noise. 
+steps: +- step: create + files: + main.jsonnet: | + { + z: { + a: 1, + b: 2, + }, + ((closeBrace:|))} + +- step: diagnosticsSettled + +- step: requestHover + as: closeBraceHover + file: main.jsonnet + at: closeBrace +- step: expectHover + request: closeBraceHover + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml new file mode 100644 index 00000000..72648dee --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml @@ -0,0 +1,40 @@ +# Smoke-test heterogeneous request handling in one scenario: +# hover, completion, and custom request/expect flow. +steps: +- step: create + files: + lib.jsonnet: local value = 1; value + main.jsonnet: ((m1:|((m2:|))))local lib = import 'lib.jsonnet'; lib.value + +- step: diagnosticsSettled + +- step: requestHover + as: hoverRequest + file: main.jsonnet + at: m1 +- step: expectHover + request: hoverRequest + result: + +- step: requestCompletion + as: completionRequest + file: main.jsonnet + at: m2 +- step: expectCompletion + request: completionRequest + result: + isIncomplete: false + items: + - label: std + kind: 9 + detail: Jsonnet standard library + +- step: requestCustom + as: evalExpression + method: jrsonnet/evalExpression + params: + expression: 1 + 2 + +- step: expectCustom + request: evalExpression + result: 3 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_consistency_noise.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_consistency_noise.yaml new file mode 100644 index 00000000..bf1e6b76 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_consistency_noise.yaml @@ -0,0 +1,40 @@ +# Keep stdlib hover rich while ensuring structural-token hover remains quiet. 
+steps: +- step: create + files: + stdlib_symbol.jsonnet: | + std.((stdLength:|))length('abc') + no_hover.jsonnet: | + ((openBrace:|)){ + answer: 42, + } + +- step: diagnosticsSettled + +- step: requestHover + as: stdlibHover + file: stdlib_symbol.jsonnet + at: stdLength +- step: expectHover + request: stdlibHover + result: + - type: 'function(x: string | object | function() | array)' + - docs: + language: jsonnet + value: std.length(x) + - docs: Returns the length of an array, string, object, or function parameters. + - docs: "**Example:**" + - docs: + language: jsonnet + value: std.length([1,2,3]) // 3 + - preview: + language: jsonnet + value: std.length + +- step: requestHover + as: noHover + file: no_hover.jsonnet + at: openBrace +- step: expectHover + request: noHover + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml new file mode 100644 index 00000000..e04069c0 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml @@ -0,0 +1,42 @@ +# Exercise hover behavior at non-symbol positions (should be null) and on +# a real symbol site (should yield a concrete inferred type). +steps: +- step: create + files: + main.jsonnet: | + local((m1:|)) x ((m2:|))= 1; + ((m3:|))x + +- step: diagnosticsSettled + + # Whitespace between `local` and `x`. +- step: requestHover + as: hoverWhitespace + file: main.jsonnet + at: m1 +- step: expectHover + request: hoverWhitespace + result: + + # Operator token. +- step: requestHover + as: hoverEquals + file: main.jsonnet + at: m2 +- step: expectHover + request: hoverEquals + result: + + # Real symbol usage should still resolve to a type. 
+- step: requestHover + as: hoverUsage + file: main.jsonnet + at: m3 +- step: expectHover + request: hoverUsage + result: + - type: number + - preview: + language: jsonnet + value: |- + local x = 1; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml new file mode 100644 index 00000000..eabaf388 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml @@ -0,0 +1,52 @@ +# Verify function-sugar hover reports parameter and bind syntax types, not +# unrelated enclosing expression types. +steps: +- step: create + files: + main.jsonnet: | + local f(((param:|))x((rparen:|))) ((eq:|))= x + 1; + ((use:|))f + +- step: diagnosticsSettled + +- step: requestHover + as: paramHover + file: main.jsonnet + at: param +- step: expectHover + request: paramHover + result: + - type: any + - preview: + language: jsonnet + value: |- + local f(x) = x + 1; + +- step: requestHover + as: rparenHover + file: main.jsonnet + at: rparen +- step: expectHover + request: rparenHover + result: + +- step: requestHover + as: equalsHover + file: main.jsonnet + at: eq +- step: expectHover + request: equalsHover + result: + +- step: requestHover + as: usageHover + file: main.jsonnet + at: use +- step: expectHover + request: usageHover + result: + - type: '(x: any) -> number' + - preview: + language: jsonnet + value: |- + local f(x) = x + 1; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml new file mode 100644 index 00000000..ed268982 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml @@ -0,0 +1,50 @@ +# Hover on imported module bindings should include member type details, +# and member access should resolve to a callable signature. 
+steps: +- step: create + files: + main.libsonnet: | + local database = import "./database/main.libsonnet"; + + { + binding: ((mBinding:|))database, + methodValue: database.((mMethod:|))new("ns", { provider: "eks" }), + } + database/main.libsonnet: | + { + new(namespace, cluster):: { + definitions: { + enabled: cluster.provider == "eks", + }, + }, + } + +- step: diagnosticsSettled + +- step: requestHover + as: bindingHover + file: main.libsonnet + at: mBinding +- step: expectHover + request: bindingHover + result: + - type: '{ new: (namespace: any, cluster: { provider: any, ... }) -> { definitions: { enabled: boolean } } }' + - context: '`./database/main.libsonnet`' + - preview: + language: jsonnet + value: |- + local database = import "./database/main.libsonnet"; + +- step: requestHover + as: methodHover + file: main.libsonnet + at: mMethod +- step: expectHover + request: methodHover + result: + - type: '(namespace: any, cluster: { provider: any, ... }) -> { definitions: { enabled: boolean } }' + - context: '`new` from `./database/main.libsonnet`' + - preview: + language: jsonnet + value: |- + database.new diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml new file mode 100644 index 00000000..eb635de9 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml @@ -0,0 +1,27 @@ +# Verify hover on an imported method field resolves to the imported callable type, +# rather than falling back to the enclosing local object type. 
+steps: +- step: create + files: + main.libsonnet: | + local module = import "./module/main.libsonnet"; + + { value: module.((m1:|))new(1) } + module/main.libsonnet: | + { new(x):: { y: x + 1 } } + +- step: diagnosticsSettled + +- step: requestHover + as: importedMethodHover + file: main.libsonnet + at: m1 +- step: expectHover + request: importedMethodHover + result: + - type: '(x: any) -> { y: number }' + - context: '`new` from `./module/main.libsonnet`' + - preview: + language: jsonnet + value: |- + module.new diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_no_result_on_comment_token.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_no_result_on_comment_token.yaml new file mode 100644 index 00000000..d91e0367 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_no_result_on_comment_token.yaml @@ -0,0 +1,18 @@ +# Verify hover does not trigger on comment text. +# Requesting hover within `//` should return `null`. +steps: +- step: create + files: + main.jsonnet: | + // ((m1:|))hi there + 1 + 1 + +- step: diagnosticsSettled + +- step: requestHover + as: hoverOnComment + file: main.jsonnet + at: m1 +- step: expectHover + request: hoverOnComment + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml new file mode 100644 index 00000000..94afc6f6 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml @@ -0,0 +1,23 @@ +# Verify hover preserves non-identifier object keys in the displayed object type. +# Hovering `obj` usage should include both `"foo bar"` and `"the-field"`. 
+steps: +- step: create + files: + main.jsonnet: | + local obj = { "foo bar": 3, "the-field": 4 }; + ((m1:|))obj + +- step: diagnosticsSettled + +- step: requestHover + as: objectTypeHasSpacedKey + file: main.jsonnet + at: m1 +- step: expectHover + request: objectTypeHasSpacedKey + result: + - type: '{ foo bar: number, the-field: number }' + - preview: + language: jsonnet + value: |- + local obj = { "foo bar": 3, "the-field": 4 }; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml new file mode 100644 index 00000000..fe285480 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml @@ -0,0 +1,23 @@ +# Hover on an object field key should report `string`. +steps: +- step: create + files: + main.jsonnet: | + { + ((fieldKey:|))z: 1, + } + +- step: diagnosticsSettled + +- step: requestHover + as: fieldKeyHover + file: main.jsonnet + at: fieldKey +- step: expectHover + request: fieldKeyHover + result: + - type: string + - preview: + language: jsonnet + value: |- + z: 1 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml new file mode 100644 index 00000000..cc4c3ede --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml @@ -0,0 +1,55 @@ +# Hover on object-local bindings should report the binding's value type at both +# declaration and usage sites, not the enclosing object type. 
+steps: +- step: create + files: + main.jsonnet: | + { + local ((bindX:|))x ((bindEq:|))= { + a: 1, + b: 2, + }, + z: ((useX:|))x, + } + +- step: diagnosticsSettled + +- step: requestHover + as: bindHover + file: main.jsonnet + at: bindX +- step: expectHover + request: bindHover + result: + - type: '{ a: number, b: number }' + - preview: + language: jsonnet + value: |- + local x = { + a: 1, + b: 2, + } + +- step: requestHover + as: bindEqHover + file: main.jsonnet + at: bindEq +- step: expectHover + request: bindEqHover + result: + +- step: requestHover + as: useHover + file: main.jsonnet + at: useX +- step: expectHover + request: useHover + result: + - type: '{ a: number, b: number }' + - preview: + language: jsonnet + value: |- + local x = { + a: 1, + b: 2, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml new file mode 100644 index 00000000..91a7c739 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml @@ -0,0 +1,31 @@ +# Hover on object-local binding usage should report the binding value type, +# not an enclosing object wrapper type. 
+steps: +- step: create + files: + main.jsonnet: | + { + local x = { + a: 1, + b: 2, + }, + z: ((useX:|))x, + } + +- step: diagnosticsSettled + +- step: requestHover + as: useHover + file: main.jsonnet + at: useX +- step: expectHover + request: useHover + result: + - type: '{ a: number, b: number }' + - preview: + language: jsonnet + value: |- + local x = { + a: 1, + b: 2, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml new file mode 100644 index 00000000..3900583d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml @@ -0,0 +1,21 @@ +# Hover on structural braces should not emit object-level noise. +steps: +- step: create + files: + main.jsonnet: | + ((openBrace:|)){ + z: { + a: 1, + b: 2, + }, + } + +- step: diagnosticsSettled + +- step: requestHover + as: openBraceHover + file: main.jsonnet + at: openBrace +- step: expectHover + request: openBraceHover + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml new file mode 100644 index 00000000..2fb40077 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml @@ -0,0 +1,32 @@ +# Verify hover keeps union precision for nested branch-dependent field values: +# `obj.a.b` should include number, boolean, and string in its inferred type. 
+steps: +- step: create + files: + main.jsonnet: | + local f(x, y, z) = + assert std.isBoolean(x) && std.isBoolean(y) && std.isBoolean(z); + local obj = { + a: + if x then + { b: if y then 1 else false } + else + { b: if z then true else "hi" }, + }; + obj.a.((m1:|))b; + f(true, false, true) + +- step: diagnosticsSettled + +- step: requestHover + as: unionFieldHoverNumber + file: main.jsonnet + at: m1 +- step: expectHover + request: unionFieldHoverNumber + result: + - type: 'true | false | number | string' + - preview: + language: jsonnet + value: |- + obj.a.b diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/import_definition_precise_range.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/import_definition_precise_range.yaml new file mode 100644 index 00000000..a4cdc1be --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/import_definition_precise_range.yaml @@ -0,0 +1,36 @@ +# Import-definition targets should resolve to a concrete range in the imported file. +steps: +- step: create + files: + import_target.libsonnet: | + [[targetRoot:{ + value: 7, + }]] + import_binding_definition.jsonnet: | + local lib = import "./import_target.libsonnet"; + ((importBindingUse:|lib)).value + import_path_definition.jsonnet: | + local lib = import "./((importPath:|import_target.libsonnet))"; + lib + +- step: diagnosticsSettled + +- step: requestDefinition + as: defFromImportBinding + file: import_binding_definition.jsonnet + at: importBindingUse +- step: expectDefinition + request: defFromImportBinding + result: + file: import_target.libsonnet + range: targetRoot + +- step: requestDefinition + as: defFromImportPath + file: import_path_definition.jsonnet + at: importPath +- step: expectDefinition + request: defFromImportPath + result: + file: import_target.libsonnet + range: targetRoot diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_cross_file_refs_rename.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_cross_file_refs_rename.yaml new file mode 100644 index 00000000..0860cf65 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_cross_file_refs_rename.yaml @@ -0,0 +1,66 @@ +# Imported-member references and rename should work from importer use sites. +steps: +- step: create + files: + cross_file_lib.libsonnet: | + { + ((libValue:|value)): 42, + } + cross_file_main.jsonnet: | + local lib = import "./cross_file_lib.libsonnet"; + { + one: lib.((mainValueOne:|value)), + two: lib.((mainValueTwo:|value)), + } + +- step: diagnosticsSettled + +- step: requestDefinition + as: defFromImporterMember + file: cross_file_main.jsonnet + at: mainValueOne +- step: expectDefinition + request: defFromImporterMember + result: + file: cross_file_lib.libsonnet + at: libValue + text: value + +- step: requestReferences + as: refsFromImporterMember + file: cross_file_main.jsonnet + at: mainValueOne + include_declaration: true +- step: expectReferences + request: refsFromImporterMember + result: + - file: cross_file_lib.libsonnet + at: libValue + text: value + - file: cross_file_main.jsonnet + at: mainValueOne + text: value + - file: cross_file_main.jsonnet + at: mainValueTwo + text: value + +- step: requestRename + as: renameFromImporterMember + file: cross_file_main.jsonnet + at: mainValueOne + new_name: renamed +- step: expectRename + request: renameFromImporterMember + result: + edits: + cross_file_lib.libsonnet: + - at: libValue + text: value + replace: renamed + cross_file_main.jsonnet: + - at: mainValueOne + text: value + replace: renamed + - at: mainValueTwo + text: value + replace: renamed diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_nested_consistency.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_nested_consistency.yaml new file mode 100644 index 00000000..ca2f3e8f --- /dev/null +++ 
b/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_nested_consistency.yaml @@ -0,0 +1,81 @@ +# Definition/hover/references/rename should align for nested imported members. +steps: +- step: create + files: + nested.libsonnet: | + { + outer: { + ((libValue:|value)): 42, + }, + } + main.jsonnet: | + local lib = import "./nested.libsonnet"; + { + one: lib.outer.((mainValueOne:|value)), + two: lib.outer.((mainValueTwo:|value)), + } + +- step: diagnosticsSettled + +- step: requestDefinition + as: defFromImporterMember + file: main.jsonnet + at: mainValueOne +- step: expectDefinition + request: defFromImporterMember + result: + file: nested.libsonnet + at: libValue + text: value + +- step: requestHover + as: hoverFromImporterMember + file: main.jsonnet + at: mainValueOne +- step: expectHover + request: hoverFromImporterMember + result: + - type: number + - context: "`outer.value` from `./nested.libsonnet`" + - preview: + language: jsonnet + value: lib.outer.value + +- step: requestReferences + as: refsFromImporterMember + file: main.jsonnet + at: mainValueOne + include_declaration: true +- step: expectReferences + request: refsFromImporterMember + result: + - file: nested.libsonnet + at: libValue + text: value + - file: main.jsonnet + at: mainValueOne + text: value + - file: main.jsonnet + at: mainValueTwo + text: value + +- step: requestRename + as: renameFromImporterMember + file: main.jsonnet + at: mainValueOne + new_name: renamedValue +- step: expectRename + request: renameFromImporterMember + result: + edits: + nested.libsonnet: + - at: libValue + text: value + replace: renamedValue + main.jsonnet: + - at: mainValueOne + text: value + replace: renamedValue + - at: mainValueTwo + text: value + replace: renamedValue diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml new file mode 100644 index 00000000..a67cb9b1 --- /dev/null +++ 
b/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml @@ -0,0 +1,62 @@ +# Ensure importstr dependencies on non-Jsonnet files are not interpreted +# as Jsonnet during analysis. +steps: + # Build a tiny workspace with: + # - `main.libsonnet` consuming values + # - `data.libsonnet` defining `importstr`/`importbin` + # - `script.k` intentionally non-Jsonnet content +- step: create + files: + # Consumer file: we hover the final object value positions to validate inferred types. + main.libsonnet: | + local data = import "./data.libsonnet"; + local payload = data.payload; + local bytes = data.bytes; + { payload: ((m1:|))payload, bytes: b((m2:|))ytes } + # Producer file: this is the behavior under test. + # `importstr` should infer `string`; `importbin` should infer byte array semantics. + data.libsonnet: | + { + payload: importstr "./script.k", + bytes: importbin "./script.k", + } + # Non-Jsonnet payload that previously caused crashes when traversed like code imports. + script.k: | + import regex + x = "abc" + open: + # Open both relevant Jsonnet files so hover/type inference has active documents. + - main.libsonnet + - data.libsonnet + + # Hover the `payload` value in the final object expression. +- step: requestHover + as: payloadHover + file: main.libsonnet + at: m1 + # `importstr` must infer a string type. +- step: expectHover + request: payloadHover + result: + - type: string + - context: '`payload` from `./data.libsonnet`' + - preview: + language: jsonnet + value: |- + local payload = data.payload; + + # Hover the `bytes` value in the same final object expression. +- step: requestHover + as: bytesHover + file: main.libsonnet + at: m2 + # `importbin` must infer an array type (the hover renders it as `array`). 
+- step: expectHover + request: bytesHover + result: + - type: array + - context: '`bytes` from `./data.libsonnet`' + - preview: + language: jsonnet + value: |- + local bytes = data.bytes; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_after_flow_narrowing_edit.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_after_flow_narrowing_edit.yaml new file mode 100644 index 00000000..235858ea --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_after_flow_narrowing_edit.yaml @@ -0,0 +1,43 @@ +# Confirm flow-sensitive inlay hints appear when a guard narrows a type, +# and disappear after an edit that removes the narrowing condition. +steps: +- step: create + files: + main.jsonnet: | + [[hintRangeBefore:local y = std.extVar("y"); + local ((hintPos:x|)) = if std.isNumber(y) then y else 0; + x]] + +- step: diagnosticsSettled + +- step: requestInlayHints + as: narrowed + file: main.jsonnet + range: hintRangeBefore + +- step: expectInlayHints + request: narrowed + result: + - positionOf: hintPos + label: ": number" + kind: 1 + paddingLeft: true + +- step: changeFull + file: main.jsonnet + text: | + [[hintRangeAfter:local y = std.extVar("y"); + local x = if true then y else 0; + x]] + version: 2 + +- step: diagnosticsSettled + +- step: requestInlayHints + as: widened + file: main.jsonnet + range: hintRangeAfter + +- step: expectInlayHints + request: widened + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_comprehension_destructuring_modes.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_comprehension_destructuring_modes.yaml new file mode 100644 index 00000000..19d28d45 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_comprehension_destructuring_modes.yaml @@ -0,0 +1,53 @@ +# Verify destructured comprehension hints require both comprehension and +# destructuring categories. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:[a + b for [((compA:a|)), ((compB:b|))] in [[1, 2]]]]] + +- step: diagnosticsSettled + +- step: config + settings: + jsonnet: + inlayHints: + local: "off" + objectLocal: "off" + comprehensions: "all" + destructuring: "off" + +- step: requestInlayHints + as: comprehensionOnly + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: comprehensionOnly + result: + +- step: config + settings: + jsonnet: + inlayHints: + local: "off" + objectLocal: "off" + comprehensions: "all" + destructuring: "all" + +- step: requestInlayHints + as: comprehensionAndDestructuring + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: comprehensionAndDestructuring + result: + - positionOf: compA + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: compB + label: ": number" + kind: 1 + paddingLeft: true diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml new file mode 100644 index 00000000..f070c7a7 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml @@ -0,0 +1,55 @@ +# Verify config-driven category toggles for inlay hints. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:local ((topLocal:x|)) = 1; + { + local ((objLocal:y|)) = 2, + ((fieldA:a|)): x + y, + ((methodM:m|))(): 1, + }]] + +- step: diagnosticsSettled + +- step: requestInlayHints + as: defaultHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: defaultHints + result: + - positionOf: topLocal + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: objLocal + label: ": number" + kind: 1 + paddingLeft: true + +- step: config + settings: + jsonnet: + inlayHints: + local: "off" + objectLocal: "off" + objectMembers: "all" + +- step: requestInlayHints + as: fieldMethodHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: fieldMethodHints + result: + - positionOf: fieldA + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: methodM + label: " -> number" + kind: 1 + paddingLeft: true diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_local_mode_filters.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_local_mode_filters.yaml new file mode 100644 index 00000000..2f2f92ad --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_local_mode_filters.yaml @@ -0,0 +1,87 @@ +# Verify local/object-local mode filters variable and function hints independently. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:local ((topVar:x|)) = 1; + local ((topFn:f|))() = 1; + { + local ((objVar:y|)) = 2, + local ((objFn:g|))() = 2, + a: x + y + f() + g(), + }]] + +- step: diagnosticsSettled + +- step: requestInlayHints + as: defaultHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: defaultHints + result: + - positionOf: topVar + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: topFn + label: " -> number" + kind: 1 + paddingLeft: true + - positionOf: objVar + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: objFn + label: " -> number" + kind: 1 + paddingLeft: true + +- step: config + settings: + jsonnet: + inlayHints: + local: "variables" + objectLocal: "variables" + +- step: requestInlayHints + as: variableOnly + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: variableOnly + result: + - positionOf: topVar + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: objVar + label: ": number" + kind: 1 + paddingLeft: true + +- step: config + settings: + jsonnet: + inlayHints: + local: "functions" + objectLocal: "functions" + +- step: requestInlayHints + as: functionOnly + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: functionOnly + result: + - positionOf: topFn + label: " -> number" + kind: 1 + paddingLeft: true + - positionOf: objFn + label: " -> number" + kind: 1 + paddingLeft: true diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_object_locals_default.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_object_locals_default.yaml new file mode 100644 index 00000000..4407598c --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_object_locals_default.yaml @@ -0,0 +1,31 @@ +# Ensure default inlay hints include local/object-local bindings, while +# object fields and methods remain off by default. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:{ + local ((objLocalVar:x|)) = 1, + local ((objLocalFn:f|))() = 1, + a: x, + m(): f(), + }]] + +- step: diagnosticsSettled + +- step: requestInlayHints + as: defaultHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: defaultHints + result: + - positionOf: objLocalVar + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: objLocalFn + label: " -> number" + kind: 1 + paddingLeft: true diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_parameters_and_call_arguments.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_parameters_and_call_arguments.yaml new file mode 100644 index 00000000..7356cfa4 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_parameters_and_call_arguments.yaml @@ -0,0 +1,40 @@ +# Verify parameter and call-argument categories together, including +# named-argument skip behavior. +steps: +- step: create + files: + main.jsonnet: | + [[hintRange:local add(((paramX:x|))=1, ((paramY:y|))=1) = x + y; + add(((argX:|1)), y=2)]] + +- step: diagnosticsSettled + +- step: config + settings: + jsonnet: + inlayHints: + local: "off" + objectLocal: "off" + functionParameters: "all" + callArguments: "all" + +- step: requestInlayHints + as: parameterAndCallHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: parameterAndCallHints + result: + - positionOf: paramX + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: paramY + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: argX + label: "x:" + kind: 2 + paddingRight: true diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/lint_config_flip_mid_session.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/lint_config_flip_mid_session.yaml new file mode 100644 index 00000000..b843b68a --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/lint_config_flip_mid_session.yaml @@ -0,0 +1,52 @@ +# Verify 
runtime config flips propagate immediately: enabling lint emits +# diagnostics, disabling clears them, and re-enabling restores them. +steps: +- step: create + files: + main.jsonnet: local ((m1:|((m2:|))))unused = 1; 42 + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: "unused variable: `unused`; prefix with `_` to silence this warning" + +- step: config + settings: + jsonnet: + enableLintDiagnostics: false + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m2 + text: unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: "unused variable: `unused`; prefix with `_` to silence this warning" + diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/marker_syntax_definition_smoke.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/marker_syntax_definition_smoke.yaml new file mode 100644 index 00000000..a38b9b41 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/marker_syntax_definition_smoke.yaml @@ -0,0 +1,26 @@ +# Marker syntax smoke test: +# - `[[name:text]]` defines a range marker. +# - `((name:|x))` defines a cursor marker before `x`. 
+steps: +- step: create + files: + main.jsonnet: | + local [[valueDecl:value]] = 1; + ((valueUse:|value)) + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: requestDefinition + as: def + file: main.jsonnet + at: valueUse + +- step: expectDefinition + request: def + result: + file: main.jsonnet + at: valueDecl diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml new file mode 100644 index 00000000..74b71d66 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml @@ -0,0 +1,157 @@ +# Coverage scenario for request/expect pairs that were previously unexercised +# in YAML runner tests. This intentionally mixes many handler types. +steps: +- step: create + files: + main.jsonnet: | + local add(a, b) = a + b; + local value = add(1, 2); + { result: value } + open: [] + +- step: open + file: main.jsonnet + text: | + local add(a, b) = a + b; + local ((valueDecl:|value)) = add(1, ((sigPos:|2))); + { result: ((valueUse:|[[valueUseRange:value]])) } + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: save + file: main.jsonnet + text: + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: requestDefinition + as: def + file: main.jsonnet + at: valueUse +- step: expectDefinition + request: def + result: + file: main.jsonnet + at: valueDecl + text: value + +- step: requestDeclaration + as: decl + file: main.jsonnet + at: valueUse +- step: expectDeclaration + request: decl + result: + file: main.jsonnet + at: valueDecl + text: value + +- step: requestTypeDefinition + as: typeDef + file: main.jsonnet + at: valueUse +- step: expectTypeDefinition + request: typeDef + result: + file: main.jsonnet + at: valueDecl + text: value + +- step: requestPrepareRename + as: prep + file: main.jsonnet 
+ at: valueUse +- step: expectPrepareRename + request: prep + result: {rangeOf: valueUseRange} + +- step: requestRename + as: rename + file: main.jsonnet + at: valueUse + new_name: output + +- step: expectRename + request: rename + result: + edits: + main.jsonnet: + - at: valueDecl + text: value + replace: output + - at: valueUse + text: value + replace: output + +- step: requestSignatureHelp + as: sig + file: main.jsonnet + at: sigPos +- step: expectSignatureHelp + request: sig + result: + signatures: + - label: add(a, b) + documentation: + parameters: + - label: [4, 5] + documentation: + - label: [7, 8] + documentation: + activeParameter: 1 + activeSignature: 0 + activeParameter: 1 + +- step: changeFull + file: main.jsonnet + text: "local [[rangeFmt:broken]] = " + version: 2 + +- step: requestFormatting + as: fmt + file: main.jsonnet + +- step: expectFormatting + request: fmt + result: + +- step: requestRangeFormatting + as: rangeFmt + file: main.jsonnet + range: rangeFmt + +- step: expectRangeFormatting + request: rangeFmt + result: + +- step: requestDocumentSymbol + as: docSymbols + file: missing.jsonnet + +- step: expectDocumentSymbol + request: docSymbols + result: + +- step: requestWorkspaceSymbol + as: wsSymbols + query: does-not-exist-anywhere + +- step: expectWorkspaceSymbol + request: wsSymbols + result: + +- step: requestCodeLens + as: codeLens + file: missing.jsonnet + +- step: expectCodeLens + request: codeLens + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/open_request_expect_sequence.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/open_request_expect_sequence.yaml new file mode 100644 index 00000000..2f3e2a75 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/open_request_expect_sequence.yaml @@ -0,0 +1,72 @@ +# Baseline open -> diagnostics -> codeAction request/expect sequence with +# full structural quickfix assertions. 
+steps: +- step: create + files: + scenario-runner.jsonnet: ((rangeStart:|))local ((x:|x)) = 1; 42 + +- step: diagnosticsSettled + +- step: requestCodeAction + as: unusedActions + file: scenario-runner.jsonnet + at: rangeStart + text: local x = 1; 42 + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: unusedActions + result: + - title: Prefix `x` with `_` + kind: quickfix + isPreferred: true + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-runner.jsonnet: + - at: x + text: x + replace: _x + + - title: Remove unused binding `x` + kind: quickfix + isPreferred: false + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-runner.jsonnet: + - at: rangeStart + len: 11 + replace: "" + + - title: Remove all unused bindings + kind: source.fixAll + isPreferred: false + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-runner.jsonnet: + - at: rangeStart + len: 11 + replace: "" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/preserve_cross_file_refs_after_close.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/preserve_cross_file_refs_after_close.yaml new file mode 100644 index 00000000..ae4d4cf8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/preserve_cross_file_refs_after_close.yaml @@ -0,0 +1,30 @@ +# Closing an importer should not evict cross-file reference information +# needed to answer references from an imported file. 
+steps: +- step: create + files: + lib.jsonnet: local ((m1:|))target = 1; ((m2:|))target + main.jsonnet: local lib = import 'lib.jsonnet'; lib.((m3:|))target + +- step: diagnosticsSettled + +- step: close + file: main.jsonnet + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterClose + file: lib.jsonnet + at: m1 + include_declaration: false + +- step: expectReferences + request: refsAfterClose + result: + - file: lib.jsonnet + at: m2 + text: target + - file: main.jsonnet + at: m3 + text: target diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/rapid_incremental_edit_recovery.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/rapid_incremental_edit_recovery.yaml new file mode 100644 index 00000000..0012cdd8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/rapid_incremental_edit_recovery.yaml @@ -0,0 +1,61 @@ +# Stress incremental edits: introduce a transient parse break and then fix it, +# then assert final diagnostics, navigation, and tokenization are coherent. +steps: +- step: create + files: + main.jsonnet: "[[kw_local:local]] ((m2:|[[decl_x:x]])) [[eq:=]] ((mLit:|))1; ((m1:|[[use_x:x]]))" + +- step: diagnosticsSettled + + # Temporary invalid edit at the literal site. +- step: changeIncremental + file: main.jsonnet + at: mLit + len: 1 + text: "\"" + version: 2 + + # Immediate correction to a valid number literal. 
+- step: changeIncremental + file: main.jsonnet + at: mLit + len: 1 + text: "[[num_tok:2]]" + version: 3 + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: requestDefinition + as: xDefinition + file: main.jsonnet + at: m1 +- step: expectDefinition + request: xDefinition + result: + file: main.jsonnet + at: m2 + text: x + +- step: requestSemanticTokensFull + as: tokensAfterRecovery + file: main.jsonnet + +- step: expectSemanticTokensFull + request: tokensAfterRecovery + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/relative_paths_smoke.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/relative_paths_smoke.yaml new file mode 100644 index 00000000..d4406267 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/relative_paths_smoke.yaml @@ -0,0 +1,25 @@ +# Smoke-test relative path handling in scenario files and imports for +# cross-file references under nested directories. 
+steps: +- step: create + files: + lib/helper.jsonnet: local ((m1:|))target = 1; ((m2:|))target + app/main.jsonnet: local lib = import '../lib/helper.jsonnet'; lib.((m3:|))target + +- step: diagnosticsSettled + +- step: requestReferences + as: refs + file: lib/helper.jsonnet + at: m1 + include_declaration: false + +- step: expectReferences + request: refs + result: + - file: lib/helper.jsonnet + at: m2 + text: target + - file: app/main.jsonnet + at: m3 + text: target diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/rename_reopen_references_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/rename_reopen_references_roundtrip.yaml new file mode 100644 index 00000000..8d093100 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/rename_reopen_references_roundtrip.yaml @@ -0,0 +1,80 @@ +# Cross-file rename should remain coherent after close/reopen boundaries: +# references and definition must reflect the renamed symbol everywhere. +steps: +- step: create + files: + lib.jsonnet: local ((libDecl:|target)) = 1; ((libUse:|target)) + main.jsonnet: local lib = import "lib.jsonnet"; lib.((mainUse:|target)) + +- step: diagnosticsSettled + +- step: requestRename + as: renameTarget + file: lib.jsonnet + at: libDecl + new_name: renamed + +- step: expectRename + request: renameTarget + result: + edits: + lib.jsonnet: + - at: libDecl + text: target + replace: renamed + - at: libUse + text: target + replace: renamed + main.jsonnet: + - at: mainUse + text: target + replace: renamed + + # Apply the edits as-if the client accepted the workspace edit. +- step: changeFull + file: lib.jsonnet + text: local ((libDeclAfter:|renamed)) = 1; ((libUseAfter:|renamed)) + version: 2 + +- step: changeFull + file: main.jsonnet + text: local lib = import "lib.jsonnet"; lib.renamed + version: 2 + + # Simulate editor lifecycle transitions around the importer document. 
+- step: close + file: main.jsonnet + +- step: open + file: main.jsonnet + text: local lib = import "lib.jsonnet"; lib.((mainUseAfter:|renamed)) + version: 3 + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterReopen + file: lib.jsonnet + at: libDeclAfter + include_declaration: false + +- step: expectReferences + request: refsAfterReopen + result: + - file: lib.jsonnet + at: libUseAfter + text: renamed + - file: main.jsonnet + at: mainUseAfter + text: renamed + +- step: requestDefinition + as: defAfterReopen + file: main.jsonnet + at: mainUseAfter +- step: expectDefinition + request: defAfterReopen + result: + file: lib.jsonnet + at: libDeclAfter + text: renamed diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/semantic_tokens_stability_after_incremental_edits.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/semantic_tokens_stability_after_incremental_edits.yaml new file mode 100644 index 00000000..95b5d4b0 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/semantic_tokens_stability_after_incremental_edits.yaml @@ -0,0 +1,101 @@ +# Verify semantic token classification stays structurally consistent across +# an incremental edit that changes value text but not token kinds. 
+steps: +- step: create + files: + main.jsonnet: "((rangeStart:|))[[kw_local:local]] [[decl_x:x]] [[eq:=]] ((num:|))[[num_tok:1]]; [[use_x:x]]" + +- step: diagnosticsSettled + +- step: requestSemanticTokensFull + as: beforeFull + file: main.jsonnet + +- step: expectSemanticTokensFull + request: beforeFull + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable + +- step: requestSemanticTokensRange + as: beforeRange + file: main.jsonnet + at: rangeStart + len: 40 + +- step: expectSemanticTokensRange + request: beforeRange + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable + +- step: changeIncremental + file: main.jsonnet + at: num + len: 1 + text: "[[num_tok:2]]" + version: 2 + +- step: diagnosticsSettled + +- step: requestSemanticTokensFull + as: afterFull + file: main.jsonnet + +- step: expectSemanticTokensFull + request: afterFull + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable + +- step: requestSemanticTokensRange + as: afterRange + file: main.jsonnet + at: rangeStart + len: 40 + +- step: expectSemanticTokensRange + request: afterRange + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/signature_help_named_arguments.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/runner/signature_help_named_arguments.yaml new file mode 100644 index 00000000..267cb0b6 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/signature_help_named_arguments.yaml @@ -0,0 +1,29 @@ +# Ensure signature help stays correct with named arguments provided out of +# declaration order and reports the active parameter precisely. +steps: +- step: create + files: + main.jsonnet: local add(a, b, c) = a + b + c; add(c=3, a=1, b=2((m1:|))) + +- step: diagnosticsSettled + +- step: requestSignatureHelp + as: sigNamed + file: main.jsonnet + at: m1 +- step: expectSignatureHelp + request: sigNamed + result: + signatures: + - label: add(a, b, c) + documentation: + parameters: + - label: [4, 5] + documentation: + - label: [7, 8] + documentation: + - label: [10, 11] + documentation: + activeParameter: 1 + activeSignature: 0 + activeParameter: 1 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/watched_file_lifecycle.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/watched_file_lifecycle.yaml new file mode 100644 index 00000000..6c78c99b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/watched_file_lifecycle.yaml @@ -0,0 +1,74 @@ +# End-to-end watched-file lifecycle scenario: create, change, delete events +# should update analysis and preserve stable cross-file reference answers. 
+steps: +- step: create + files: + main.jsonnet: local lib = import 'lib1.jsonnet'; lib.((m3:|((m6:|))))target + open: + - main.jsonnet + +- step: diagnosticsSettled + +- step: writeFile + path: lib1.jsonnet + text: local ((m1:|))target = 1; ((m2:|))target + +- step: notifyWatchedFiles + changes: + - path: lib1.jsonnet + type: created + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterCreate + file: lib1.jsonnet + at: m1 + include_declaration: false + +- step: expectReferences + request: refsAfterCreate + result: + - file: lib1.jsonnet + at: m2 + text: target + - file: main.jsonnet + at: m3 + text: target + +- step: writeFile + path: lib1.jsonnet + text: local ((m4:|))target = 2; ((m5:|))target + +- step: notifyWatchedFiles + changes: + - path: lib1.jsonnet + type: changed + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterChange + file: lib1.jsonnet + at: m4 + include_declaration: false + +- step: expectReferences + request: refsAfterChange + result: + - file: lib1.jsonnet + at: m5 + text: target + - file: main.jsonnet + at: m6 + text: target + +- step: deleteFile + path: lib1.jsonnet + +- step: notifyWatchedFiles + changes: + - path: lib1.jsonnet + type: deleted + +- step: diagnosticsSettled diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_flat_positive.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_flat_positive.yaml new file mode 100644 index 00000000..2623ac8b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_flat_positive.yaml @@ -0,0 +1,23 @@ +# Verify `workspace/symbol` can find `result` and report a concrete symbol +# location/range in the source file via shorthand structural assertions. 
+steps: +- step: create + files: + main.jsonnet: | + local x = 1; { ((m1:|))result: x } + +- step: diagnosticsSettled + +- step: requestWorkspaceSymbol + as: wsSymbols + query: result + +- step: expectWorkspaceSymbol + request: wsSymbols + result: + symbols: + - name: result + kind: 8 + file: main.jsonnet + at: m1 + text: "result: x" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_nested_container.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_nested_container.yaml new file mode 100644 index 00000000..445e0638 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_nested_container.yaml @@ -0,0 +1,23 @@ +# Workspace symbol queries should preserve nesting context via container_name. +steps: +- step: create + files: + main.jsonnet: "{ outer: { ((m1:|))innerField: 1 } }" + +- step: diagnosticsSettled + +- step: requestWorkspaceSymbol + as: wsNested + query: inner + +- step: expectWorkspaceSymbol + request: wsNested + result: + symbols: + - name: innerField + kind: 8 + file: main.jsonnet + at: m1 + text: "innerField: 1" + container_name: outer + diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_updates_after_incremental_rename.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_updates_after_incremental_rename.yaml new file mode 100644 index 00000000..bbc79bda --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_updates_after_incremental_rename.yaml @@ -0,0 +1,68 @@ +# Verify workspace symbol indexing tracks incremental edits in-place: +# - query for the old field name returns no symbols after rename +# - query for the new field name returns the renamed field at the new range. 
+steps: + - step: create + files: + main.jsonnet: | + local obj = { + [[resultOneField:resultOne]]: 1, + keepMe: 2, + }; + obj.[[resultOneUse:resultOne]] + + - step: diagnosticsSettled + + - step: requestWorkspaceSymbol + as: beforeRename + query: resultOne + + - step: expectWorkspaceSymbol + request: beforeRename + result: + symbols: + - name: resultOne + kind: 8 + file: main.jsonnet + at: resultOneField + text: "resultOne: 1" + container_name: obj + + - step: changeIncremental + file: main.jsonnet + at: resultOneField + text: ((finalValueField:|finalValue)) + len: 9 + version: 2 + + - step: changeIncremental + file: main.jsonnet + at: resultOneUse + text: ((finalValueUse:|finalValue)) + len: 9 + version: 3 + + - step: diagnosticsSettled + + - step: requestWorkspaceSymbol + as: oldNameGone + query: resultOne + + - step: expectWorkspaceSymbol + request: oldNameGone + result: null + + - step: requestWorkspaceSymbol + as: newNamePresent + query: finalValue + + - step: expectWorkspaceSymbol + request: newNamePresent + result: + symbols: + - name: finalValue + kind: 8 + file: main.jsonnet + at: finalValueField + text: "finalValue: 1" + container_name: obj diff --git a/crates/jrsonnet-lsp/tests/stress_tests.rs b/crates/jrsonnet-lsp/tests/stress_tests.rs new file mode 100644 index 00000000..6c23b5db --- /dev/null +++ b/crates/jrsonnet-lsp/tests/stress_tests.rs @@ -0,0 +1,722 @@ +//! Stress tests for the LSP server. +//! +//! These tests verify the server behaves correctly under load: +//! - Rapid document changes (simulating fast typing) +//! - Concurrent requests from multiple threads +//! - Many documents open simultaneously +//! 
- Large document handling + +use std::{ + fmt::Write as _, + sync::Arc, + thread, + time::{Duration, Instant}, +}; + +use assert_matches::assert_matches; +use lsp_server::{Connection, Message, Notification, Request}; +use lsp_types::{ + notification::{DidChangeTextDocument, DidOpenTextDocument, Notification as _}, + request::{Completion, GotoDefinition, HoverRequest, Initialize, Request as _, Shutdown}, + CompletionParams, DidChangeTextDocumentParams, DidOpenTextDocumentParams, GotoDefinitionParams, + HoverParams, InitializeParams, PartialResultParams, Position, Range, + TextDocumentContentChangeEvent, TextDocumentIdentifier, TextDocumentItem, + TextDocumentPositionParams, VersionedTextDocumentIdentifier, WorkDoneProgressParams, +}; +use serde_json::json; + +mod support; +use support::{recv_response_by_id, recv_until, LONG_RESPONSE_TIMEOUT, RESPONSE_TIMEOUT}; + +const QUIESCENCE_TIMEOUT: Duration = Duration::from_millis(100); + +// ============================================================================= +// Test Helpers +// ============================================================================= + +/// Helper to create an initialize request. +fn initialize_request(id: i32) -> Request { + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(InitializeParams::default()).expect("expected success"), + ) +} + +/// Helper to create a shutdown request. +fn shutdown_request(id: i32) -> Request { + Request::new( + id.into(), + Shutdown::METHOD.to_string(), + serde_json::Value::Null, + ) +} + +/// Helper to create an initialized notification. +fn initialized_notification() -> Notification { + Notification::new("initialized".to_string(), json!({})) +} + +/// Helper to create an exit notification. +fn exit_notification() -> Notification { + Notification::new("exit".to_string(), json!({})) +} + +/// Helper to create a didOpen notification. 
+fn did_open_notification(uri: &str, text: &str, version: i32) -> Notification { + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: uri.parse().expect("expected success"), + language_id: "jsonnet".to_string(), + version, + text: text.to_string(), + }, + }; + Notification::new( + DidOpenTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a didChange notification (full document replacement). +fn did_change_notification_full(uri: &str, text: &str, version: i32) -> Notification { + let params = DidChangeTextDocumentParams { + text_document: VersionedTextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + version, + }, + content_changes: vec![TextDocumentContentChangeEvent { + range: None, + range_length: None, + text: text.to_string(), + }], + }; + Notification::new( + DidChangeTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a didChange notification (incremental change). +fn did_change_notification_incremental( + uri: &str, + range: Range, + text: &str, + version: i32, +) -> Notification { + let params = DidChangeTextDocumentParams { + text_document: VersionedTextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + version, + }, + content_changes: vec![TextDocumentContentChangeEvent { + range: Some(range), + range_length: None, + text: text.to_string(), + }], + }; + Notification::new( + DidChangeTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a hover request. 
+fn hover_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = HoverParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + HoverRequest::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a completion request. +fn completion_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = CompletionParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + }; + Request::new( + id.into(), + Completion::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a goto definition request. +fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoDefinition::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Run the server with the given connection in a separate thread. 
+fn run_server(connection: Connection) -> thread::JoinHandle<()> { + thread::spawn(move || { + let server = jrsonnet_lsp::server::Server::new(connection); + let _ = server.run(); + }) +} + +/// Initialize a server connection and return the client connection. +fn init_server() -> (Connection, thread::JoinHandle<()>) { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Send initialize request + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .expect("expected success"); + + // Receive initialize response + let response = client_conn.receiver.recv().expect("expected success"); + assert_matches!(response, Message::Response(resp) => { + assert!(resp.error.is_none(), "Initialize should succeed"); + }); + + // Send initialized notification + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + (client_conn, server_thread) +} + +/// Shutdown and clean up the server. +fn shutdown_server(client_conn: &Connection, server_thread: thread::JoinHandle<()>, req_id: i32) { + // Send shutdown request + client_conn + .sender + .send(Message::Request(shutdown_request(req_id))) + .expect("expected success"); + + // Receive shutdown response + let response = recv_response_by_id(client_conn, req_id.into(), RESPONSE_TIMEOUT) + .expect("expected shutdown response"); + assert!(response.error.is_none(), "Shutdown should succeed"); + + // Send exit notification + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + + // Wait for server to exit + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +// ============================================================================= +// Stress Tests +// ============================================================================= + +/// Test rapid document changes (simulating fast typing). 
+/// +/// This verifies that the server handles many quick edits without crashing +/// or getting into an inconsistent state. +#[test] +fn test_rapid_document_changes() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/rapid.jsonnet"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, "{}", 1))) + .expect("expected success"); + + // Wait for initial diagnostics + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Rapid full-document changes (simulating fast typing) + for i in 2..=100 { + let content = format!("{{ x: {i} }}"); + client_conn + .sender + .send(Message::Notification(did_change_notification_full( + uri, &content, i, + ))) + .expect("expected success"); + } + + // Wait for processing to settle + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Verify server is still responsive with a hover request + client_conn + .sender + .send(Message::Request(hover_request(1000, uri, 0, 3))) + .expect("expected success"); + + // Should get a response (not necessarily with content, but should respond) + let response = recv_response_by_id(&client_conn, 1000.into(), RESPONSE_TIMEOUT) + .expect("Server should respond after rapid changes"); + assert!(response.error.is_none(), "Request should not error"); + + shutdown_server(&client_conn, server_thread, 1001); +} + +/// Test rapid incremental changes (simulating character-by-character typing). 
+#[test] +fn test_rapid_incremental_changes() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/incremental.jsonnet"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, + "local x = 1;\n", + 1, + ))) + .expect("expected success"); + + // Wait for initial diagnostics + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Simulate typing "x + 1" character by character at the end + let chars = ['x', ' ', '+', ' ', '1']; + let mut version = 2; + for (i, ch) in chars.iter().enumerate() { + let character = u32::try_from(i).expect("typing index should fit in u32"); + let range = Range { + start: Position { line: 1, character }, + end: Position { line: 1, character }, + }; + client_conn + .sender + .send(Message::Notification(did_change_notification_incremental( + uri, + range, + &ch.to_string(), + version, + ))) + .expect("expected success"); + version += 1; + } + + // Wait for processing + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Verify server is responsive + client_conn + .sender + .send(Message::Request(hover_request(100, uri, 0, 6))) + .expect("expected success"); + + let response = recv_response_by_id(&client_conn, 100.into(), RESPONSE_TIMEOUT) + .expect("Server should respond"); + assert!(response.error.is_none()); + + shutdown_server(&client_conn, server_thread, 101); +} + +/// Test concurrent requests from multiple threads. +/// +/// This verifies that the server can handle requests arriving nearly simultaneously. 
+#[test] +fn test_concurrent_requests() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/concurrent.jsonnet"; + let text = r"local x = 1; +local y = 2; +local add(a, b) = a + b; +add(x, y)"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text, 1))) + .expect("expected success"); + + // Wait for initial diagnostics + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Send multiple requests concurrently + let sender = Arc::new(client_conn.sender.clone()); + let handles: Vec<_> = (0..10) + .map(|i| { + let sender = Arc::clone(&sender); + let uri = uri.to_string(); + thread::spawn(move || { + let base_id = (i + 1) * 100; + // Send hover request + sender + .send(Message::Request(hover_request(base_id, &uri, 0, 7))) + .expect("expected success"); + // Send goto definition request + sender + .send(Message::Request(goto_definition_request( + base_id + 1, + &uri, + 3, + 4, + ))) + .expect("expected success"); + // Send completion request + sender + .send(Message::Request(completion_request( + base_id + 2, + &uri, + 3, + 0, + ))) + .expect("expected success"); + }) + }) + .collect(); + + // Wait for all sends to complete + for handle in handles { + handle.join().expect("expected success"); + } + + // Collect all responses (30 requests total) + let deadline = Instant::now() + LONG_RESPONSE_TIMEOUT; + let mut responses = Vec::with_capacity(30); + while responses.len() < 30 { + let Some(remaining) = deadline.checked_duration_since(Instant::now()) else { + break; + }; + let Some(response) = recv_until(&client_conn, remaining, |message| match message { + Message::Response(response) => Some(response), + _ => None, + }) else { + break; + }; + responses.push(response); + } + + // Verify we got all responses + assert_eq!( + responses.len(), + 30, + "Should receive all concurrent request responses" + ); + + // Verify no errors in responses + for resp in &responses { + 
assert!( + resp.error.is_none(), + "Response {} should not have error: {:?}", + resp.id, + resp.error + ); + } + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test many documents open simultaneously. +/// +/// This verifies that the server can handle many open documents without +/// excessive memory usage or performance degradation. +#[test] +fn test_many_documents() { + let (client_conn, server_thread) = init_server(); + + let num_documents = 50; + + // Open many documents + for i in 0..num_documents { + let uri = format!("file:///test/doc{i}.jsonnet"); + let content = format!( + r"local x{i} = {i}; +local f{i}(a) = a + x{i}; +f{i}(1)" + ); + client_conn + .sender + .send(Message::Notification(did_open_notification( + &uri, &content, 1, + ))) + .expect("expected success"); + } + + // Wait for all documents to be processed + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Query each document to verify they're all accessible + let mut successful_queries = 0; + for i in 0..num_documents { + let uri = format!("file:///test/doc{i}.jsonnet"); + client_conn + .sender + .send(Message::Request(hover_request(i + 100, &uri, 0, 7))) + .expect("expected success"); + + let response = recv_response_by_id(&client_conn, (i + 100).into(), RESPONSE_TIMEOUT) + .expect("Should receive a response for every hover request"); + assert!( + response.error.is_none(), + "Hover request for document {i} should succeed" + ); + successful_queries += 1; + } + + assert_eq!( + successful_queries, num_documents, + "Should successfully query every opened document" + ); + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test handling of a large document. +/// +/// This verifies that the server can handle documents with many definitions +/// without excessive slowdown. 
+#[test] +fn test_large_document() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/large.jsonnet"; + + // Generate a large document with many local bindings + let mut content = String::new(); + let num_locals: usize = 200; + for i in 0..num_locals { + let _ = writeln!(content, "local x{i} = {i};"); + } + content.push_str("{\n"); + for i in 0..num_locals { + let _ = writeln!(content, " field{i}: x{i},"); + } + content.push_str("}\n"); + + // Open the large document + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, &content, 1, + ))) + .expect("expected success"); + + // Wait for processing + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Test hover at the beginning + client_conn + .sender + .send(Message::Request(hover_request(100, uri, 0, 7))) + .expect("expected success"); + + let hover_response = recv_response_by_id(&client_conn, 100.into(), RESPONSE_TIMEOUT) + .expect("Server should respond to hover on large document"); + assert!( + hover_response.error.is_none(), + "Hover should succeed on large document" + ); + + // Test goto definition in the middle + let middle_line = num_locals + (num_locals / 2); + client_conn + .sender + .send(Message::Request(goto_definition_request( + 101, + uri, + u32::try_from(middle_line).expect("middle line should fit in u32"), + 12, + ))) + .expect("expected success"); + + let goto_response = recv_response_by_id(&client_conn, 101.into(), RESPONSE_TIMEOUT) + .expect("Should receive goto definition response"); + assert!( + goto_response.error.is_none(), + "Goto definition should succeed on large document" + ); + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test document changes during pending requests. +/// +/// This verifies that the server handles document updates while requests +/// are being processed, without returning stale or incorrect results. 
+#[test] +fn test_changes_during_requests() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/changing.jsonnet"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, + "local x = 1; x", + 1, + ))) + .expect("expected success"); + + // Wait for initial processing + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Send a request and immediately change the document + client_conn + .sender + .send(Message::Request(goto_definition_request(100, uri, 0, 13))) + .expect("expected success"); + + // Change document before response + client_conn + .sender + .send(Message::Notification(did_change_notification_full( + uri, + "local y = 2; y", + 2, + ))) + .expect("expected success"); + + // The server should handle this gracefully and still return a response. + let response = recv_response_by_id(&client_conn, 100.into(), RESPONSE_TIMEOUT) + .expect("Should receive goto definition response despite document change"); + if let Some(err) = &response.error { + assert!( + err.code != -32603, + "Should not have internal error: {err:?}" + ); + } + + // Verify server is still responsive after the change + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + client_conn + .sender + .send(Message::Request(hover_request(200, uri, 0, 6))) + .expect("expected success"); + + let post_change_response = recv_response_by_id(&client_conn, 200.into(), RESPONSE_TIMEOUT) + .expect("Server should respond after document change"); + assert!( + post_change_response.error.is_none(), + "Server should be responsive after document change" + ); + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test that requests are handled without blocking for too long. +/// +/// This is a basic responsiveness test to ensure the server doesn't hang. 
+#[test] +fn test_responsiveness() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/responsive.jsonnet"; + let text = r"local x = 1; +local f(a) = a * 2; +f(x)"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text, 1))) + .expect("expected success"); + + // Wait for processing + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); + + // Send multiple different request types and verify all respond within timeout + let requests = vec![ + hover_request(1, uri, 0, 7), + goto_definition_request(2, uri, 2, 0), + completion_request(3, uri, 2, 0), + ]; + + for req in requests { + let req_id = req.id.clone(); + client_conn + .sender + .send(Message::Request(req)) + .expect("expected success"); + + let response = recv_response_by_id(&client_conn, req_id.clone(), RESPONSE_TIMEOUT) + .expect("Request timed out while waiting for response"); + assert!( + response.error.is_none(), + "Request {req_id:?} should not error" + ); + } + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test clean shutdown during document processing. +/// +/// This verifies that the server shuts down cleanly even when documents +/// are being processed. 
+#[test] +fn test_shutdown_during_processing() { + let (client_conn, server_thread) = init_server(); + + // Open several documents to keep the server busy + for i in 0..10 { + let uri = format!("file:///test/shutdown{i}.jsonnet"); + let content = format!("local x{i} = {i}; x{i}"); + client_conn + .sender + .send(Message::Notification(did_open_notification( + &uri, &content, 1, + ))) + .expect("expected success"); + } + + // Don't wait for diagnostics - immediately shutdown + // This tests graceful shutdown during active processing + client_conn + .sender + .send(Message::Request(shutdown_request(100))) + .expect("expected success"); + + let response = recv_response_by_id(&client_conn, 100.into(), RESPONSE_TIMEOUT) + .expect("Should receive shutdown response"); + assert!(response.error.is_none(), "Shutdown should succeed"); + + // Send exit + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + + // Server should exit cleanly + server_thread + .join() + .expect("Server thread should exit cleanly after shutdown during processing"); +} diff --git a/crates/jrsonnet-lsp/tests/support/lsp_test_transport.rs b/crates/jrsonnet-lsp/tests/support/lsp_test_transport.rs new file mode 100644 index 00000000..7538e873 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/support/lsp_test_transport.rs @@ -0,0 +1,39 @@ +use std::time::{Duration, Instant}; + +use lsp_server::{Connection, Message, RequestId}; + +pub(crate) const RESPONSE_TIMEOUT: Duration = Duration::from_secs(5); +pub(crate) const LONG_RESPONSE_TIMEOUT: Duration = Duration::from_secs(10); + +pub(crate) fn recv_until( + conn: &Connection, + timeout: Duration, + mut matcher: impl FnMut(Message) -> Option, +) -> Option { + let start = Instant::now(); + loop { + let remaining = timeout.checked_sub(start.elapsed())?; + match conn.receiver.recv_timeout(remaining) { + Ok(message) => { + if let Some(value) = matcher(message) { + return Some(value); + } + } + Err( + 
crossbeam_channel::RecvTimeoutError::Timeout + | crossbeam_channel::RecvTimeoutError::Disconnected, + ) => return None, + } + } +} + +pub(crate) fn recv_response_by_id( + conn: &Connection, + request_id: RequestId, + timeout: Duration, +) -> Option { + recv_until(conn, timeout, |message| match message { + Message::Response(response) if response.id == request_id => Some(response), + _ => None, + }) +} diff --git a/crates/jrsonnet-lsp/tests/support/mod.rs b/crates/jrsonnet-lsp/tests/support/mod.rs new file mode 100644 index 00000000..f8e6cfcd --- /dev/null +++ b/crates/jrsonnet-lsp/tests/support/mod.rs @@ -0,0 +1,5 @@ +mod lsp_test_transport; + +pub(crate) use lsp_test_transport::{ + recv_response_by_id, recv_until, LONG_RESPONSE_TIMEOUT, RESPONSE_TIMEOUT, +}; diff --git a/crates/jrsonnet-rowan-parser/Cargo.toml b/crates/jrsonnet-rowan-parser/Cargo.toml index 3430ba1d..9ecfc999 100644 --- a/crates/jrsonnet-rowan-parser/Cargo.toml +++ b/crates/jrsonnet-rowan-parser/Cargo.toml @@ -21,3 +21,4 @@ thiserror.workspace = true indoc.workspace = true insta.workspace = true anyhow.workspace = true +jrsonnet-evaluator = { workspace = true, features = ["exp-null-coaelse"] } diff --git a/crates/jrsonnet-rowan-parser/jsonnet.ungram b/crates/jrsonnet-rowan-parser/jsonnet.ungram index b2c70e44..90376010 100644 --- a/crates/jrsonnet-rowan-parser/jsonnet.ungram +++ b/crates/jrsonnet-rowan-parser/jsonnet.ungram @@ -5,25 +5,29 @@ SourceFile = Expr -SuffixIndex = +// Progressive wrapping expression types - each suffix operation wraps its base +ExprField = + base:Expr '?'? '.' - index:Name -SuffixIndexExpr = - ('?' '.')? + field:Name + +ExprIndex = + base:Expr + '?'? + '.'? '[' index:Expr ']' -SuffixSlice = + +ExprSlice = + base:Expr SliceDesc -SuffixApply = + +ExprCall = + callee:Expr ArgsDesc 'tailstrict'? 
-Suffix = - SuffixIndex -| SuffixIndexExpr -| SuffixSlice -| SuffixApply StmtLocal = 'local' @@ -101,7 +105,6 @@ ExprError = Expr = Stmt* ExprBase - Suffix* ExprBase = ExprBinary @@ -119,6 +122,10 @@ ExprBase = | ExprIfThenElse | ExprFunction | ExprError +| ExprField +| ExprIndex +| ExprSlice +| ExprCall BinaryOperator = '||' | '??' | '&&' diff --git a/crates/jrsonnet-rowan-parser/src/generated/nodes.rs b/crates/jrsonnet-rowan-parser/src/generated/nodes.rs index 1b55459d..99489271 100644 --- a/crates/jrsonnet-rowan-parser/src/generated/nodes.rs +++ b/crates/jrsonnet-rowan-parser/src/generated/nodes.rs @@ -29,23 +29,23 @@ impl Expr { pub fn expr_base(&self) -> Option { support::child(&self.syntax) } - pub fn suffixs(&self) -> AstChildren { - support::children(&self.syntax) - } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct SuffixIndex { +pub struct ExprField { pub(crate) syntax: SyntaxNode, } -impl SuffixIndex { +impl ExprField { + pub fn base(&self) -> Option { + support::child(&self.syntax) + } pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } pub fn dot_token(&self) -> Option { support::token(&self.syntax, T![.]) } - pub fn index(&self) -> Option { + pub fn field(&self) -> Option { support::child(&self.syntax) } } @@ -61,10 +61,13 @@ impl Name { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct SuffixIndexExpr { +pub struct ExprIndex { pub(crate) syntax: SyntaxNode, } -impl SuffixIndexExpr { +impl ExprIndex { + pub fn base(&self) -> Option { + support::child(&self.syntax) + } pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } @@ -75,7 +78,7 @@ impl SuffixIndexExpr { support::token(&self.syntax, T!['[']) } pub fn index(&self) -> Option { - support::child(&self.syntax) + support::children::(&self.syntax).nth(1) } pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) @@ -83,10 +86,13 @@ impl SuffixIndexExpr { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] 
-pub struct SuffixSlice { +pub struct ExprSlice { pub(crate) syntax: SyntaxNode, } -impl SuffixSlice { +impl ExprSlice { + pub fn base(&self) -> Option { + support::child(&self.syntax) + } pub fn slice_desc(&self) -> Option { support::child(&self.syntax) } @@ -118,10 +124,13 @@ impl SliceDesc { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct SuffixApply { +pub struct ExprCall { pub(crate) syntax: SyntaxNode, } -impl SuffixApply { +impl ExprCall { + pub fn callee(&self) -> Option { + support::child(&self.syntax) + } pub fn args_desc(&self) -> Option { support::child(&self.syntax) } @@ -190,7 +199,7 @@ impl Assertion { support::token(&self.syntax, T![:]) } pub fn message(&self) -> Option { - support::child(&self.syntax) + support::children::(&self.syntax).nth(1) } } @@ -206,7 +215,7 @@ impl ExprBinary { support::token_child(&self.syntax) } pub fn rhs(&self) -> Option { - support::child(&self.syntax) + support::children::(&self.syntax).nth(1) } } @@ -803,14 +812,6 @@ impl DestructArrayElement { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Suffix { - SuffixIndex(SuffixIndex), - SuffixIndexExpr(SuffixIndexExpr), - SuffixSlice(SuffixSlice), - SuffixApply(SuffixApply), -} - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Bind { BindDestruct(BindDestruct), @@ -852,6 +853,10 @@ pub enum ExprBase { ExprIfThenElse(ExprIfThenElse), ExprFunction(ExprFunction), ExprError(ExprError), + ExprField(ExprField), + ExprIndex(ExprIndex), + ExprSlice(ExprSlice), + ExprCall(ExprCall), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -1072,9 +1077,9 @@ impl AstNode for Expr { &self.syntax } } -impl AstNode for SuffixIndex { +impl AstNode for ExprField { fn can_cast(kind: SyntaxKind) -> bool { - kind == SUFFIX_INDEX + kind == EXPR_FIELD } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { @@ -1102,9 +1107,9 @@ impl AstNode for Name { &self.syntax } } -impl AstNode for SuffixIndexExpr { +impl AstNode for ExprIndex { fn can_cast(kind: 
SyntaxKind) -> bool { - kind == SUFFIX_INDEX_EXPR + kind == EXPR_INDEX } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { @@ -1117,9 +1122,9 @@ impl AstNode for SuffixIndexExpr { &self.syntax } } -impl AstNode for SuffixSlice { +impl AstNode for ExprSlice { fn can_cast(kind: SyntaxKind) -> bool { - kind == SUFFIX_SLICE + kind == EXPR_SLICE } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { @@ -1147,9 +1152,9 @@ impl AstNode for SliceDesc { &self.syntax } } -impl AstNode for SuffixApply { +impl AstNode for ExprCall { fn can_cast(kind: SyntaxKind) -> bool { - kind == SUFFIX_APPLY + kind == EXPR_CALL } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { @@ -1852,52 +1857,6 @@ impl AstNode for DestructArrayElement { &self.syntax } } -impl From for Suffix { - fn from(node: SuffixIndex) -> Suffix { - Suffix::SuffixIndex(node) - } -} -impl From for Suffix { - fn from(node: SuffixIndexExpr) -> Suffix { - Suffix::SuffixIndexExpr(node) - } -} -impl From for Suffix { - fn from(node: SuffixSlice) -> Suffix { - Suffix::SuffixSlice(node) - } -} -impl From for Suffix { - fn from(node: SuffixApply) -> Suffix { - Suffix::SuffixApply(node) - } -} -impl AstNode for Suffix { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - SUFFIX_INDEX | SUFFIX_INDEX_EXPR | SUFFIX_SLICE | SUFFIX_APPLY => true, - _ => false, - } - } - fn cast(syntax: SyntaxNode) -> Option { - let res = match syntax.kind() { - SUFFIX_INDEX => Suffix::SuffixIndex(SuffixIndex { syntax }), - SUFFIX_INDEX_EXPR => Suffix::SuffixIndexExpr(SuffixIndexExpr { syntax }), - SUFFIX_SLICE => Suffix::SuffixSlice(SuffixSlice { syntax }), - SUFFIX_APPLY => Suffix::SuffixApply(SuffixApply { syntax }), - _ => return None, - }; - Some(res) - } - fn syntax(&self) -> &SyntaxNode { - match self { - Suffix::SuffixIndex(it) => &it.syntax, - Suffix::SuffixIndexExpr(it) => &it.syntax, - Suffix::SuffixSlice(it) => &it.syntax, - Suffix::SuffixApply(it) => 
&it.syntax, - } - } -} impl From for Bind { fn from(node: BindDestruct) -> Bind { Bind::BindDestruct(node) @@ -2101,12 +2060,33 @@ impl From for ExprBase { ExprBase::ExprError(node) } } +impl From for ExprBase { + fn from(node: ExprField) -> ExprBase { + ExprBase::ExprField(node) + } +} +impl From for ExprBase { + fn from(node: ExprIndex) -> ExprBase { + ExprBase::ExprIndex(node) + } +} +impl From for ExprBase { + fn from(node: ExprSlice) -> ExprBase { + ExprBase::ExprSlice(node) + } +} +impl From for ExprBase { + fn from(node: ExprCall) -> ExprBase { + ExprBase::ExprCall(node) + } +} impl AstNode for ExprBase { fn can_cast(kind: SyntaxKind) -> bool { match kind { EXPR_BINARY | EXPR_UNARY | EXPR_OBJ_EXTEND | EXPR_PARENED | EXPR_STRING | EXPR_NUMBER | EXPR_LITERAL | EXPR_ARRAY | EXPR_OBJECT | EXPR_ARRAY_COMP - | EXPR_IMPORT | EXPR_VAR | EXPR_IF_THEN_ELSE | EXPR_FUNCTION | EXPR_ERROR => true, + | EXPR_IMPORT | EXPR_VAR | EXPR_IF_THEN_ELSE | EXPR_FUNCTION | EXPR_ERROR + | EXPR_FIELD | EXPR_INDEX | EXPR_SLICE | EXPR_CALL => true, _ => false, } } @@ -2127,6 +2107,10 @@ impl AstNode for ExprBase { EXPR_IF_THEN_ELSE => ExprBase::ExprIfThenElse(ExprIfThenElse { syntax }), EXPR_FUNCTION => ExprBase::ExprFunction(ExprFunction { syntax }), EXPR_ERROR => ExprBase::ExprError(ExprError { syntax }), + EXPR_FIELD => ExprBase::ExprField(ExprField { syntax }), + EXPR_INDEX => ExprBase::ExprIndex(ExprIndex { syntax }), + EXPR_SLICE => ExprBase::ExprSlice(ExprSlice { syntax }), + EXPR_CALL => ExprBase::ExprCall(ExprCall { syntax }), _ => return None, }; Some(res) @@ -2148,6 +2132,10 @@ impl AstNode for ExprBase { ExprBase::ExprIfThenElse(it) => &it.syntax, ExprBase::ExprFunction(it) => &it.syntax, ExprBase::ExprError(it) => &it.syntax, + ExprBase::ExprField(it) => &it.syntax, + ExprBase::ExprIndex(it) => &it.syntax, + ExprBase::ExprSlice(it) => &it.syntax, + ExprBase::ExprCall(it) => &it.syntax, } } } @@ -2765,11 +2753,6 @@ impl std::fmt::Display for CustomError { 
std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for Suffix { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(self.syntax(), f) - } -} impl std::fmt::Display for Bind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) @@ -2830,7 +2813,7 @@ impl std::fmt::Display for Expr { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for SuffixIndex { +impl std::fmt::Display for ExprField { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } @@ -2840,12 +2823,12 @@ impl std::fmt::Display for Name { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for SuffixIndexExpr { +impl std::fmt::Display for ExprIndex { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for SuffixSlice { +impl std::fmt::Display for ExprSlice { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } @@ -2855,7 +2838,7 @@ impl std::fmt::Display for SliceDesc { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for SuffixApply { +impl std::fmt::Display for ExprCall { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } diff --git a/crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs b/crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs index 156bc9c8..91670c79 100644 --- a/crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs +++ b/crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs @@ -182,12 +182,12 @@ pub enum SyntaxKind { __LAST_TOKEN, SOURCE_FILE, EXPR, - SUFFIX_INDEX, + EXPR_FIELD, NAME, - SUFFIX_INDEX_EXPR, - SUFFIX_SLICE, + EXPR_INDEX, + EXPR_SLICE, SLICE_DESC, - SUFFIX_APPLY, + EXPR_CALL, ARGS_DESC, STMT_LOCAL, STMT_ASSERT, @@ -234,7 
+234,6 @@ pub enum SyntaxKind { DESTRUCT_OBJECT_FIELD, DESTRUCT_REST, DESTRUCT_ARRAY_ELEMENT, - SUFFIX, BIND, STMT, OBJ_BODY, @@ -273,9 +272,9 @@ impl SyntaxKind { } pub fn is_enum(self) -> bool { match self { - SUFFIX | BIND | STMT | OBJ_BODY | COMP_SPEC | EXPR_BASE | MEMBER_COMP | MEMBER - | FIELD_NAME | DESTRUCT | DESTRUCT_ARRAY_PART | BINARY_OPERATOR | UNARY_OPERATOR - | LITERAL | TEXT | NUMBER | IMPORT_KIND | VISIBILITY | TRIVIA | CUSTOM_ERROR => true, + BIND | STMT | OBJ_BODY | COMP_SPEC | EXPR_BASE | MEMBER_COMP | MEMBER | FIELD_NAME + | DESTRUCT | DESTRUCT_ARRAY_PART | BINARY_OPERATOR | UNARY_OPERATOR | LITERAL + | TEXT | NUMBER | IMPORT_KIND | VISIBILITY | TRIVIA | CUSTOM_ERROR => true, _ => false, } } diff --git a/crates/jrsonnet-rowan-parser/src/lib.rs b/crates/jrsonnet-rowan-parser/src/lib.rs index b1bceef6..7f674b40 100644 --- a/crates/jrsonnet-rowan-parser/src/lib.rs +++ b/crates/jrsonnet-rowan-parser/src/lib.rs @@ -3,7 +3,7 @@ use event::Sink; use generated::nodes::{SourceFile, Trivia}; use lex::lex; -use parser::{LocatedSyntaxError, Parser}; +use parser::Parser; pub use rowan; mod ast; @@ -15,12 +15,15 @@ mod marker; mod parser; mod precedence; mod string_block; +mod syntax_semantics; mod tests; mod token_set; pub use ast::{AstChildren, AstNode, AstToken}; pub use generated::{nodes, syntax_kinds::SyntaxKind}; pub use language::*; +pub use parser::{ExpectedSyntax, LocatedSyntaxError, SyntaxError}; +pub use rowan::GreenNode; pub use token_set::SyntaxKindSet; use self::{ @@ -28,7 +31,10 @@ use self::{ generated::nodes::{Expr, ExprBinary, ExprObjExtend}, }; -pub fn parse(input: &str) -> (SourceFile, Vec) { +/// Parse input and return a GreenNode (thread-safe) plus errors. +/// +/// Use `source_file_from_green` to create a `SourceFile` from the green node. 
+pub fn parse_green(input: &str) -> (GreenNode, Vec) { let lexemes = lex(input); let kinds = lexemes .iter() @@ -40,12 +46,19 @@ pub fn parse(input: &str) -> (SourceFile, Vec) { let sink = Sink::new(events, &lexemes); let parse = sink.finish(); - ( - SourceFile { - syntax: parse.syntax(), - }, - parse.errors, - ) + (parse.green_node, parse.errors) +} + +/// Create a SourceFile from a GreenNode. +pub fn source_file_from_green(green: &GreenNode) -> SourceFile { + SourceFile { + syntax: SyntaxNode::new_root(green.clone()), + } +} + +pub fn parse(input: &str) -> (SourceFile, Vec) { + let (green, errors) = parse_green(input); + (source_file_from_green(&green), errors) } impl ExprBinary { pub fn lhs_work(&self) -> Option { @@ -69,3 +82,147 @@ impl ExprObjExtend { children.next() } } + +#[cfg(test)] +mod ast_structure_tests { + use indoc::indoc; + + use super::*; + + fn check(input: &str, expected: &str) { + let (file, errors) = parse(input); + assert!( + errors.is_empty(), + "parse errors for '{}': {:?}", + input, + errors + ); + let actual = format!("{:#?}", file.syntax()); + assert_eq!( + actual.trim(), + expected.trim(), + "AST mismatch for '{}'", + input + ); + } + + #[test] + fn field_access() { + check( + "std.length", + indoc! {r#" + SOURCE_FILE@0..10 + EXPR@0..10 + EXPR_FIELD@0..10 + EXPR@0..3 + EXPR_VAR@0..3 + NAME@0..3 + IDENT@0..3 "std" + DOT@3..4 "." + NAME@4..10 + IDENT@4..10 "length" + "#}, + ); + } + + #[test] + fn method_call() { + check( + "std.length(x)", + indoc! {r#" + SOURCE_FILE@0..13 + EXPR@0..13 + EXPR_CALL@0..13 + EXPR@0..10 + EXPR_FIELD@0..10 + EXPR@0..3 + EXPR_VAR@0..3 + NAME@0..3 + IDENT@0..3 "std" + DOT@3..4 "." + NAME@4..10 + IDENT@4..10 "length" + ARGS_DESC@10..13 + L_PAREN@10..11 "(" + ARG@11..12 + EXPR@11..12 + EXPR_VAR@11..12 + NAME@11..12 + IDENT@11..12 "x" + R_PAREN@12..13 ")" + "#}, + ); + } + + #[test] + fn chained_field_access() { + check( + "a.b.c", + indoc! 
{r#" + SOURCE_FILE@0..5 + EXPR@0..5 + EXPR_FIELD@0..5 + EXPR@0..3 + EXPR_FIELD@0..3 + EXPR@0..1 + EXPR_VAR@0..1 + NAME@0..1 + IDENT@0..1 "a" + DOT@1..2 "." + NAME@2..3 + IDENT@2..3 "b" + DOT@3..4 "." + NAME@4..5 + IDENT@4..5 "c" + "#}, + ); + } + + #[test] + fn index_access() { + check( + "arr[0]", + indoc! {r#" + SOURCE_FILE@0..6 + EXPR@0..6 + EXPR_INDEX@0..6 + EXPR@0..3 + EXPR_VAR@0..3 + NAME@0..3 + IDENT@0..3 "arr" + L_BRACK@3..4 "[" + EXPR@4..5 + EXPR_NUMBER@4..5 + FLOAT@4..5 "0" + R_BRACK@5..6 "]" + "#}, + ); + } + + #[test] + fn slice_access() { + check( + "arr[1:3]", + indoc! {r#" + SOURCE_FILE@0..8 + EXPR@0..8 + EXPR_SLICE@0..8 + EXPR@0..3 + EXPR_VAR@0..3 + NAME@0..3 + IDENT@0..3 "arr" + SLICE_DESC@3..8 + L_BRACK@3..4 "[" + EXPR@4..5 + EXPR_NUMBER@4..5 + FLOAT@4..5 "1" + COLON@5..6 ":" + SLICE_DESC_END@6..7 + EXPR@6..7 + EXPR_NUMBER@6..7 + FLOAT@6..7 "3" + R_BRACK@7..8 "]" + "#}, + ); + } +} diff --git a/crates/jrsonnet-rowan-parser/src/parser.rs b/crates/jrsonnet-rowan-parser/src/parser.rs index 91d3b224..f908bcaf 100644 --- a/crates/jrsonnet-rowan-parser/src/parser.rs +++ b/crates/jrsonnet-rowan-parser/src/parser.rs @@ -1,6 +1,7 @@ use std::{cell::Cell, fmt, rc::Rc}; use rowan::{GreenNode, TextRange}; +use thiserror::Error; use crate::{ event::Event, @@ -9,9 +10,13 @@ use crate::{ token_set::SyntaxKindSet, AstToken, SyntaxKind, SyntaxKind::*, - SyntaxNode, T, TS, + T, TS, }; +/// Token set for field visibility (:, ::, :::). 
+/// Note: We can't use VISIBILITY because Rust's macro tokenizer splits ":::" into "::" + ":" +const VISIBILITY: SyntaxKindSet = TS![: ::].with(COLONCOLONCOLON); + pub struct Parse { pub green_node: GreenNode, pub errors: Vec, @@ -29,24 +34,23 @@ pub struct Parser { steps: Cell, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq, Eq, Error)] pub enum SyntaxError { + #[error("expected {expected}, found {found:?}")] Unexpected { expected: ExpectedSyntax, found: SyntaxKind, }, - Missing { - expected: ExpectedSyntax, - }, - Custom { - error: String, - }, - Hint { - error: String, - }, + #[error("expected {expected}")] + Missing { expected: ExpectedSyntax }, + #[error("{error}")] + Custom { error: String }, + #[error("{error}")] + Hint { error: String }, } -#[derive(Debug)] +#[derive(Debug, PartialEq, Eq, Error)] +#[error("{error}")] pub struct LocatedSyntaxError { pub error: SyntaxError, pub range: TextRange, @@ -255,7 +259,7 @@ impl Drop for ExpectedSyntaxGuard { } } -#[derive(Clone, Debug, Copy)] +#[derive(Clone, Debug, Copy, PartialEq, Eq)] pub enum ExpectedSyntax { Named(&'static str), Unnamed(SyntaxKindSet), @@ -629,89 +633,117 @@ fn array(p: &mut Parser) -> CompletedMarker { m.complete(p, EXPR_ARRAY) } } -/// Returns true if it was slice, false if just index + +/// Parses safe navigation: `expr?.field` or `expr?.[index]` +/// Expects parser to be positioned at `?` with `.` following #[must_use] -fn slice_desc_or_index(p: &mut Parser) -> bool { - let m = p.start(); - p.bump(); - // TODO: do not treat :, ::, ::: as full tokens? - // Start +fn suffix_safe_nav(p: &mut Parser, m: Marker) -> CompletedMarker { + p.bump(); // ? + p.bump(); // . 
+ if p.at(IDENT) { + name(p); + m.complete(p, EXPR_FIELD) + } else if p.at(T!['[']) { + p.bump(); // [ + expr(p); + p.expect(T![']']); + m.complete(p, EXPR_INDEX) + } else { + m.complete_missing(p, ExpectedSyntax::Named("field or index")) + } +} + +/// Parses field access: `expr.field` +/// Expects parser to be positioned at `.` +#[must_use] +fn suffix_field(p: &mut Parser, m: Marker) -> CompletedMarker { + p.bump(); // . + name(p); + m.complete(p, EXPR_FIELD) +} + +/// Parses index or slice: `expr[index]` or `expr[start:end:step]` +/// Expects parser to be positioned at `[` +#[must_use] +fn suffix_index_or_slice(p: &mut Parser, m: Marker) -> CompletedMarker { + let slice_m = p.start(); + p.bump(); // [ + + // Parse start expression if not immediately a colon if !p.at(T![:]) && !p.at(T![::]) { expr(p); } + if p.at(T![:]) { - p.bump(); - // End - if !p.at(T![']']) { - expr(p).wrap(p, SLICE_DESC_END); - } - if p.at(T![:]) { - p.bump(); - // Step - if !p.at(T![']']) { - expr(p).wrap(p, SLICE_DESC_STEP); - } - } + slice_single_colon(p); + slice_m.complete(p, SLICE_DESC); + m.complete(p, EXPR_SLICE) } else if p.at(T![::]) { - p.bump(); - // End - if !p.at(T![']']) { - expr(p).wrap(p, SLICE_DESC_END); - } + slice_double_colon(p); + slice_m.complete(p, SLICE_DESC); + m.complete(p, EXPR_SLICE) } else { - // It was not a slice p.expect(T![']']); - m.forget(p); - return false; + slice_m.forget(p); + m.complete(p, EXPR_INDEX) + } +} + +/// Continues parsing slice after single `:` - handles `[start:end]` and `[start:end:step]` +fn slice_single_colon(p: &mut Parser) { + p.bump(); // first : + if !p.at(T![:]) && !p.at(T![']']) { + expr(p).wrap(p, SLICE_DESC_END); + } + if p.at(T![:]) { + p.bump(); // second : + if !p.at(T![']']) { + expr(p).wrap(p, SLICE_DESC_STEP); + } + } + p.expect(T![']']); +} + +/// Continues parsing slice after `::` - handles `[start::step]` +fn slice_double_colon(p: &mut Parser) { + p.bump(); // :: + if !p.at(T![']']) { + expr(p).wrap(p, SLICE_DESC_END); 
} p.expect(T![']']); - m.complete(p, SLICE_DESC); - true } -fn suffix(p: &mut Parser) { +/// Parses function call: `expr(args)` +/// Expects parser to be positioned at `(` +#[must_use] +fn suffix_call(p: &mut Parser, m: Marker) -> CompletedMarker { + args_desc(p); + m.complete(p, EXPR_CALL) +} + +fn lhs(p: &mut Parser) -> Result { + let mut result = lhs_basic(p)?; + + // Each suffix wraps the previous result progressively loop { - let start = p.start(); - let _marker: CompletedMarker = if p.at(T![?]) { - p.bump(); - p.expect(T![.]); - if p.at(IDENT) { - name(p); - start.complete(p, SUFFIX_INDEX) - } else if p.at(T!['[']) { - p.bump(); - expr(p); - p.expect(T![']']); - start.complete(p, SUFFIX_INDEX_EXPR) - } else { - start.complete_missing(p, ExpectedSyntax::Named("index")) - } + result = if p.at(T![?]) && p.nth_at(1, T![.]) { + let m = result.wrap(p, EXPR).precede(p); + suffix_safe_nav(p, m) } else if p.at(T![.]) { - p.bump(); - name(p); - start.complete(p, SUFFIX_INDEX) + let m = result.wrap(p, EXPR).precede(p); + suffix_field(p, m) } else if p.at(T!['[']) { - if slice_desc_or_index(p) { - start.complete(p, SUFFIX_SLICE) - } else { - start.complete(p, SUFFIX_INDEX_EXPR) - } + let m = result.wrap(p, EXPR).precede(p); + suffix_index_or_slice(p, m) } else if p.at(T!['(']) { - args_desc(p); - start.complete(p, SUFFIX_APPLY) + let m = result.wrap(p, EXPR).precede(p); + suffix_call(p, m) } else { - start.forget(p); break; }; } -} - -fn lhs(p: &mut Parser) -> Result { - let lhs = lhs_basic(p)?; - - suffix(p); - Ok(lhs) + Ok(result) } fn name(p: &mut Parser) { let m = p.start(); @@ -766,7 +798,9 @@ fn destruct(p: &mut Parser) -> CompletedMarker { // } // had_rest = true; } else { + let m_elem = p.start(); destruct(p); + m_elem.complete(p, DESTRUCT_ARRAY_ELEMENT); } if p.at(T![,]) { p.bump(); @@ -835,16 +869,25 @@ fn bind(p: &mut Parser) { }; } fn text(p: &mut Parser) { - assert!(Text::can_cast(p.current())); - p.bump(); + if Text::can_cast(p.current()) { + p.bump(); + } 
else { + p.error_with_recovery_set(TS![]); + } } fn number(p: &mut Parser) { - assert!(Number::can_cast(p.current())); - p.bump(); + if Number::can_cast(p.current()) { + p.bump(); + } else { + p.error_with_recovery_set(TS![]); + } } fn literal(p: &mut Parser) { - assert!(Literal::can_cast(p.current())); - p.bump(); + if Literal::can_cast(p.current()) { + p.bump(); + } else { + p.error_with_recovery_set(TS![]); + } } fn lhs_basic(p: &mut Parser) -> Result { let _e = p.expected_syntax_name("expression"); @@ -900,7 +943,9 @@ fn lhs_basic(p: &mut Parser) -> Result { let m = p.start(); p.bump(); - let _ = expr_binding_power(p, right_binding_power); + if let Ok(operand) = expr_binding_power(p, right_binding_power) { + operand.precede(p).complete(p, EXPR); + } m.complete(p, EXPR_UNARY) } else if p.at(T!['(']) { let m = p.start(); @@ -912,9 +957,3 @@ fn lhs_basic(p: &mut Parser) -> Result { return Err(p.error_with_no_skip()); }) } - -impl Parse { - pub fn syntax(&self) -> SyntaxNode { - SyntaxNode::new_root(self.green_node.clone()) - } -} diff --git a/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs new file mode 100644 index 00000000..38b0658b --- /dev/null +++ b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs @@ -0,0 +1,943 @@ +use crate::{ + nodes::{BinaryOperatorKind, UnaryOperatorKind}, + SyntaxKind, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SemanticTokenClass { + Keyword, + Comment, + String, + Number, + Operator, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum BinaryOperatorClass { + Add, + Numeric, + Equality, + Ordering, + Membership, + LogicalAnd, + LogicalOr, + NullCoalesce, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum UnaryOperatorClass { + Numeric, + LogicalNot, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct TokenDocPurpose { + pub doc: &'static str, + pub example: &'static str, + pub outcome: TokenDocOutcome, +} + +#[derive(Debug, 
Clone, Copy, PartialEq, Eq)] +pub enum TokenDocOutcome { + Number, + String, + Boolean, + Null, + Object, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct TokenSemantics { + pub kind: SyntaxKind, + pub hover_purposes: &'static [TokenDocPurpose], + pub hover_eligible: bool, + pub semantic_class: Option, + pub binary_op_class: Option, + pub unary_op_class: Option, +} + +macro_rules! token_purpose { + ($doc:expr, $example:expr, $outcome:ident) => { + TokenDocPurpose { + doc: $doc, + example: $example, + outcome: TokenDocOutcome::$outcome, + } + }; +} + +macro_rules! token_meta { + ( + $kind:ident + $(, purposes = $purposes:expr)? + $(, hover_eligible = $hover_eligible:expr)? + $(, semantic = $semantic:ident)? + $(, binary = $binary:ident)? + $(, unary = $unary:ident)? + ) => { + TokenSemantics { + kind: SyntaxKind::$kind, + hover_purposes: token_meta!(@purposes $($purposes)?), + hover_eligible: token_meta!(@bool $($hover_eligible)?), + semantic_class: token_meta!(@opt_semantic $($semantic)?), + binary_op_class: token_meta!(@opt_binary $($binary)?), + unary_op_class: token_meta!(@opt_unary $($unary)?), + } + }; + (@purposes $value:expr) => { $value }; + (@purposes) => { &[] }; + (@bool $value:expr) => { $value }; + (@bool) => { false }; + (@opt_semantic $value:ident) => { Some(SemanticTokenClass::$value) }; + (@opt_semantic) => { None }; + (@opt_binary $value:ident) => { Some(BinaryOperatorClass::$value) }; + (@opt_binary) => { None }; + (@opt_unary $value:ident) => { Some(UnaryOperatorClass::$value) }; + (@opt_unary) => { None }; +} + +const TOKEN_SEMANTICS: &[TokenSemantics] = &[ + token_meta!(IDENT, hover_eligible = true), + token_meta!(FLOAT, hover_eligible = true, semantic = Number), + token_meta!(STRING_DOUBLE, hover_eligible = true, semantic = String), + token_meta!(STRING_SINGLE, hover_eligible = true, semantic = String), + token_meta!( + STRING_DOUBLE_VERBATIM, + hover_eligible = true, + semantic = String + ), + token_meta!( + 
STRING_SINGLE_VERBATIM, + hover_eligible = true, + semantic = String + ), + token_meta!(STRING_BLOCK, hover_eligible = true, semantic = String), + token_meta!( + NULL_KW, + purposes = &[token_purpose!("Literal `null` value.", "null", Null)], + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + TRUE_KW, + purposes = &[token_purpose!("Boolean literal `true`.", "true", Boolean)], + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + FALSE_KW, + purposes = &[token_purpose!("Boolean literal `false`.", "false", Boolean)], + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + SELF_KW, + purposes = &[token_purpose!( + "`self` refers to the current object value.", + "{ value: self }", + Object + )], + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + SUPER_KW, + purposes = &[token_purpose!( + "`super` refers to inherited object fields.", + "{ x: 1 } + { y: super.x }", + Object + )], + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + DOLLAR, + purposes = &[token_purpose!( + "`$` refers to the root object.", + "{ x: 1, y: $.x }", + Object + )], + hover_eligible = true, + semantic = Operator + ), + token_meta!( + PLUS, + purposes = &[token_purpose!( + "`+` adds numbers, concatenates strings/arrays, or merges objects.", + "1 + 2", + Number + )], + hover_eligible = true, + semantic = Operator, + binary = Add + ), + token_meta!( + MINUS, + purposes = &[token_purpose!( + "`-` subtracts numbers (or negates with unary form).", + "2 - 1", + Number + )], + hover_eligible = true, + semantic = Operator, + binary = Numeric, + unary = Numeric + ), + token_meta!( + MUL, + purposes = &[token_purpose!("`*` multiplies numbers.", "2 * 3", Number)], + hover_eligible = true, + semantic = Operator, + binary = Numeric + ), + token_meta!( + DIV, + purposes = &[token_purpose!("`/` divides numbers.", "4 / 2", Number)], + hover_eligible = true, + semantic = Operator, + binary = Numeric + ), + token_meta!( + MODULO, + purposes 
= &[ + token_purpose!("`%` computes numeric remainder.", "5 % 2", Number), + token_purpose!( + "`%` formats strings with placeholders.", + "\"hello %s\" % \"world\"", + String + ), + ], + hover_eligible = true, + semantic = Operator, + binary = Numeric + ), + token_meta!( + AND, + purposes = &[token_purpose!( + "`&&` requires both operands to be truthy.", + "true && false", + Boolean + )], + hover_eligible = true, + semantic = Operator, + binary = LogicalAnd + ), + token_meta!( + OR, + purposes = &[token_purpose!( + "`||` requires at least one operand to be truthy.", + "true || false", + Boolean + )], + hover_eligible = true, + semantic = Operator, + binary = LogicalOr + ), + token_meta!( + NOT, + purposes = &[token_purpose!( + "`!` negates a boolean expression.", + "!true", + Boolean + )], + hover_eligible = true, + semantic = Operator, + unary = LogicalNot + ), + token_meta!( + EQ, + purposes = &[token_purpose!( + "`==` checks value equality.", + "1 == 1", + Boolean + )], + hover_eligible = true, + semantic = Operator, + binary = Equality + ), + token_meta!( + NE, + purposes = &[token_purpose!( + "`!=` checks value inequality.", + "1 != 2", + Boolean + )], + hover_eligible = true, + semantic = Operator, + binary = Equality + ), + token_meta!( + LT, + purposes = &[token_purpose!( + "`<` checks strict less-than ordering.", + "1 < 2", + Boolean + )], + hover_eligible = true, + semantic = Operator, + binary = Ordering + ), + token_meta!( + LE, + purposes = &[token_purpose!( + "`<=` checks less-than-or-equal ordering.", + "1 <= 2", + Boolean + )], + hover_eligible = true, + semantic = Operator, + binary = Ordering + ), + token_meta!( + GT, + purposes = &[token_purpose!( + "`>` checks strict greater-than ordering.", + "2 > 1", + Boolean + )], + hover_eligible = true, + semantic = Operator, + binary = Ordering + ), + token_meta!( + GE, + purposes = &[token_purpose!( + "`>=` checks greater-than-or-equal ordering.", + "2 >= 2", + Boolean + )], + hover_eligible = true, + 
semantic = Operator, + binary = Ordering + ), + token_meta!( + NULL_COAELSE, + purposes = &[token_purpose!( + "`??` returns right-hand value when left side is `null`.", + "null ?? 1", + Number + )], + hover_eligible = true, + semantic = Operator, + binary = NullCoalesce + ), + token_meta!(LOCAL_KW, semantic = Keyword), + token_meta!(IF_KW, semantic = Keyword), + token_meta!(THEN_KW, semantic = Keyword), + token_meta!(ELSE_KW, semantic = Keyword), + token_meta!(FUNCTION_KW, semantic = Keyword), + token_meta!(IMPORT_KW, semantic = Keyword), + token_meta!(IMPORTSTR_KW, semantic = Keyword), + token_meta!(IMPORTBIN_KW, semantic = Keyword), + token_meta!(FOR_KW, semantic = Keyword), + token_meta!(IN_KW, semantic = Keyword, binary = Membership), + token_meta!(ERROR_KW, semantic = Keyword), + token_meta!(ASSERT_KW, semantic = Keyword), + token_meta!(TAILSTRICT_KW, semantic = Keyword), + token_meta!(SINGLE_LINE_SLASH_COMMENT, semantic = Comment), + token_meta!(SINGLE_LINE_HASH_COMMENT, semantic = Comment), + token_meta!(MULTI_LINE_COMMENT, semantic = Comment), + token_meta!(BIT_AND, semantic = Operator, binary = Numeric), + token_meta!(BIT_OR, semantic = Operator, binary = Numeric), + token_meta!(BIT_XOR, semantic = Operator, binary = Numeric), + token_meta!(BIT_NOT, semantic = Operator, unary = Numeric), + token_meta!(LHS, semantic = Operator, binary = Numeric), + token_meta!(RHS, semantic = Operator, binary = Numeric), + token_meta!(ASSIGN, semantic = Operator), +]; + +fn token_semantics(kind: SyntaxKind) -> Option<&'static TokenSemantics> { + TOKEN_SEMANTICS + .iter() + .find(|metadata| metadata.kind == kind) +} + +fn token_purpose_markdown(purpose: &TokenDocPurpose) -> String { + format!("{}\n\n```jsonnet\n{}\n```", purpose.doc, purpose.example) +} + +fn token_doc_markdown(purposes: &[TokenDocPurpose]) -> Option { + match purposes { + [] => None, + [purpose] => Some(token_purpose_markdown(purpose)), + _ => { + use std::fmt::Write as _; + + let mut markdown = 
String::from("This token has multiple purposes:"); + for (idx, purpose) in purposes.iter().enumerate() { + markdown.push_str("\n\n"); + if write!(markdown, "**Purpose {}**\n\n", idx + 1).is_err() { + return None; + } + markdown.push_str(&token_purpose_markdown(purpose)); + } + Some(markdown) + } + } +} + +impl SyntaxKind { + #[must_use] + pub fn token_semantics(self) -> Option<&'static TokenSemantics> { + token_semantics(self) + } + + /// Returns token documentation purposes, each with prose and an example. + #[must_use] + pub fn token_doc_purposes(self) -> &'static [TokenDocPurpose] { + self.token_semantics() + .map_or(&[], |metadata| metadata.hover_purposes) + } + + /// Returns language-level markdown documentation for tokens/operators. + #[must_use] + pub fn token_doc_markdown(self) -> Option { + token_doc_markdown(self.token_doc_purposes()) + } + + /// Tokens that should participate in hover lookup. + #[must_use] + pub fn is_hover_eligible(self) -> bool { + self.token_semantics() + .is_some_and(|metadata| metadata.hover_eligible) + } + + /// Semantic token class for this lexical token, if any. + #[must_use] + pub fn semantic_token_class(self) -> Option { + self.token_semantics() + .and_then(|metadata| metadata.semantic_class) + } + + /// Keyword tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_keyword_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::Keyword) + } + + /// Comment tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_comment_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::Comment) + } + + /// String tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_string_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::String) + } + + /// Numeric tokens used for semantic highlighting. 
+ #[must_use] + pub fn is_semantic_number_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::Number) + } + + /// Operator tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_operator_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::Operator) + } +} + +impl BinaryOperatorKind { + #[must_use] + pub fn class(self) -> Option { + self.token_kind() + .and_then(SyntaxKind::token_semantics) + .and_then(|metadata| metadata.binary_op_class) + } + + /// Whether this operator always yields a `number`. + #[must_use] + pub fn returns_number(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::Numeric) + } + + /// Whether this operator always yields a `boolean`. + #[must_use] + pub fn returns_boolean(self) -> bool { + self.class().is_some_and(|class| { + matches!( + class, + BinaryOperatorClass::Equality + | BinaryOperatorClass::Ordering + | BinaryOperatorClass::Membership + ) + }) + } + + /// Whether this operator is `&&`, `||`, or `??`. 
+ #[must_use] + pub fn is_logical_short_circuit(self) -> bool { + self.class().is_some_and(|class| { + matches!( + class, + BinaryOperatorClass::LogicalAnd + | BinaryOperatorClass::LogicalOr + | BinaryOperatorClass::NullCoalesce + ) + }) + } + + #[must_use] + pub fn is_logical_and(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::LogicalAnd) + } + + #[must_use] + pub fn is_logical_or(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::LogicalOr) + } + + #[must_use] + pub fn is_equality(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::Equality) + } + + #[must_use] + pub fn is_ordering(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::Ordering) + } + + #[must_use] + pub fn is_membership(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::Membership) + } + + fn token_kind(self) -> Option { + match self { + BinaryOperatorKind::Or => Some(SyntaxKind::OR), + BinaryOperatorKind::NullCoaelse => Some(SyntaxKind::NULL_COAELSE), + BinaryOperatorKind::And => Some(SyntaxKind::AND), + BinaryOperatorKind::BitOr => Some(SyntaxKind::BIT_OR), + BinaryOperatorKind::BitXor => Some(SyntaxKind::BIT_XOR), + BinaryOperatorKind::BitAnd => Some(SyntaxKind::BIT_AND), + BinaryOperatorKind::Eq => Some(SyntaxKind::EQ), + BinaryOperatorKind::Ne => Some(SyntaxKind::NE), + BinaryOperatorKind::Lt => Some(SyntaxKind::LT), + BinaryOperatorKind::Gt => Some(SyntaxKind::GT), + BinaryOperatorKind::Le => Some(SyntaxKind::LE), + BinaryOperatorKind::Ge => Some(SyntaxKind::GE), + BinaryOperatorKind::InKw => Some(SyntaxKind::IN_KW), + BinaryOperatorKind::Lhs => Some(SyntaxKind::LHS), + BinaryOperatorKind::Rhs => Some(SyntaxKind::RHS), + BinaryOperatorKind::Plus => Some(SyntaxKind::PLUS), + BinaryOperatorKind::Minus => Some(SyntaxKind::MINUS), + BinaryOperatorKind::Mul => Some(SyntaxKind::MUL), + BinaryOperatorKind::Div => Some(SyntaxKind::DIV), 
+ BinaryOperatorKind::Modulo => Some(SyntaxKind::MODULO), + BinaryOperatorKind::MetaObjectApply | BinaryOperatorKind::ErrorNoOperator => None, + } + } +} + +impl UnaryOperatorKind { + #[must_use] + pub fn class(self) -> Option { + self.token_kind() + .token_semantics() + .and_then(|metadata| metadata.unary_op_class) + } + + /// Whether this operator always yields a `boolean`. + #[must_use] + pub fn returns_boolean(self) -> bool { + self.class() + .is_some_and(|class| class == UnaryOperatorClass::LogicalNot) + } + + /// Whether this operator always yields a `number`. + #[must_use] + pub fn returns_number(self) -> bool { + self.class() + .is_some_and(|class| class == UnaryOperatorClass::Numeric) + } + + #[must_use] + pub fn is_logical_not(self) -> bool { + self.class() + .is_some_and(|class| class == UnaryOperatorClass::LogicalNot) + } + + fn token_kind(self) -> SyntaxKind { + match self { + UnaryOperatorKind::Minus => SyntaxKind::MINUS, + UnaryOperatorKind::Not => SyntaxKind::NOT, + UnaryOperatorKind::BitNot => SyntaxKind::BIT_NOT, + } + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashSet; + + use jrsonnet_evaluator::{State, Val}; + + use super::{ + BinaryOperatorClass, BinaryOperatorKind, SemanticTokenClass, SyntaxKind, TokenDocOutcome, + UnaryOperatorClass, UnaryOperatorKind, TOKEN_SEMANTICS, + }; + use crate::rowan::NodeOrToken; + + // Test-only explicit decision list: lexical tokens that are intentionally not + // semantic-highlighted. Coverage tests fail if any token is neither classified nor ignored. 
+ const SEMANTIC_TOKEN_EXPLICITLY_IGNORED: &[SyntaxKind] = &[ + SyntaxKind::L_BRACK, + SyntaxKind::R_BRACK, + SyntaxKind::L_PAREN, + SyntaxKind::R_PAREN, + SyntaxKind::L_BRACE, + SyntaxKind::R_BRACE, + SyntaxKind::COLON, + SyntaxKind::COLONCOLON, + SyntaxKind::COLONCOLONCOLON, + SyntaxKind::SEMI, + SyntaxKind::DOT, + SyntaxKind::DOTDOTDOT, + SyntaxKind::COMMA, + SyntaxKind::QUESTION_MARK, + SyntaxKind::ERROR_FLOAT_JUNK_AFTER_POINT, + SyntaxKind::ERROR_FLOAT_JUNK_AFTER_EXPONENT, + SyntaxKind::ERROR_FLOAT_JUNK_AFTER_EXPONENT_SIGN, + SyntaxKind::ERROR_STRING_DOUBLE_UNTERMINATED, + SyntaxKind::ERROR_STRING_SINGLE_UNTERMINATED, + SyntaxKind::ERROR_STRING_DOUBLE_VERBATIM_UNTERMINATED, + SyntaxKind::ERROR_STRING_SINGLE_VERBATIM_UNTERMINATED, + SyntaxKind::ERROR_STRING_VERBATIM_MISSING_QUOTES, + SyntaxKind::ERROR_STRING_BLOCK_UNEXPECTED_END, + SyntaxKind::ERROR_STRING_BLOCK_MISSING_NEW_LINE, + SyntaxKind::ERROR_STRING_BLOCK_MISSING_TERMINATION, + SyntaxKind::ERROR_STRING_BLOCK_MISSING_INDENT, + SyntaxKind::WHITESPACE, + SyntaxKind::ERROR_COMMENT_TOO_SHORT, + SyntaxKind::ERROR_COMMENT_UNTERMINATED, + SyntaxKind::META_OBJECT_APPLY, + SyntaxKind::ERROR_NO_OPERATOR, + SyntaxKind::ERROR_MISSING_TOKEN, + SyntaxKind::ERROR_UNEXPECTED_TOKEN, + SyntaxKind::ERROR_CUSTOM, + SyntaxKind::LEXING_ERROR, + ]; + const BINARY_OPERATOR_EXPLICITLY_IGNORED: &[BinaryOperatorKind] = &[ + BinaryOperatorKind::MetaObjectApply, + BinaryOperatorKind::ErrorNoOperator, + ]; + + fn all_lexical_token_kinds() -> impl Iterator { + (SyntaxKind::OR.into_raw()..=SyntaxKind::LEXING_ERROR.into_raw()).map(SyntaxKind::from_raw) + } + + fn value_matches_outcome(value: &Val, outcome: TokenDocOutcome) -> bool { + match outcome { + TokenDocOutcome::Number => matches!(value, Val::Num(_)), + TokenDocOutcome::String => matches!(value, Val::Str(_)), + TokenDocOutcome::Boolean => matches!(value, Val::Bool(_)), + TokenDocOutcome::Null => matches!(value, Val::Null), + TokenDocOutcome::Object => matches!(value, Val::Obj(_)), 
+ } + } + + #[test] + fn token_doc_lookup() { + assert_eq!( + SyntaxKind::PLUS.token_doc_markdown(), + Some( + "`+` adds numbers, concatenates strings/arrays, or merges objects.\n\n```jsonnet\n1 + 2\n```" + .to_owned() + ) + ); + assert_eq!( + SyntaxKind::NULL_COAELSE.token_doc_markdown(), + Some( + "`??` returns right-hand value when left side is `null`.\n\n```jsonnet\nnull ?? 1\n```" + .to_owned() + ) + ); + assert_eq!( + SyntaxKind::MODULO.token_doc_markdown(), + Some( + "This token has multiple purposes:\n\n**Purpose 1**\n\n`%` computes numeric remainder.\n\n```jsonnet\n5 % 2\n```\n\n**Purpose 2**\n\n`%` formats strings with placeholders.\n\n```jsonnet\n\"hello %s\" % \"world\"\n```" + .to_owned() + ) + ); + assert_eq!(SyntaxKind::IDENT.token_doc_markdown(), None); + } + + #[test] + fn hover_and_semantic_token_classification() { + assert!(SyntaxKind::IDENT.is_hover_eligible()); + assert!(SyntaxKind::PLUS.is_hover_eligible()); + assert!(!SyntaxKind::WHITESPACE.is_hover_eligible()); + assert!(SyntaxKind::LOCAL_KW.is_semantic_keyword_token()); + assert!(SyntaxKind::SINGLE_LINE_SLASH_COMMENT.is_semantic_comment_token()); + assert!(SyntaxKind::STRING_DOUBLE.is_semantic_string_token()); + assert!(SyntaxKind::FLOAT.is_semantic_number_token()); + assert!(SyntaxKind::NULL_COAELSE.is_semantic_operator_token()); + } + + #[test] + fn binary_operator_categories() { + assert_eq!( + BinaryOperatorKind::Mul.class(), + Some(BinaryOperatorClass::Numeric) + ); + assert_eq!( + BinaryOperatorKind::Eq.class(), + Some(BinaryOperatorClass::Equality) + ); + assert_eq!( + BinaryOperatorKind::And.class(), + Some(BinaryOperatorClass::LogicalAnd) + ); + assert!(BinaryOperatorKind::Mul.returns_number()); + assert!(BinaryOperatorKind::Eq.returns_boolean()); + assert!(BinaryOperatorKind::And.is_logical_short_circuit()); + assert!(BinaryOperatorKind::Eq.is_equality()); + assert!(BinaryOperatorKind::Gt.is_ordering()); + assert!(BinaryOperatorKind::InKw.is_membership()); + } + + #[test] + fn 
unary_operator_categories() { + assert_eq!( + UnaryOperatorKind::Not.class(), + Some(UnaryOperatorClass::LogicalNot) + ); + assert_eq!( + UnaryOperatorKind::Minus.class(), + Some(UnaryOperatorClass::Numeric) + ); + assert!(UnaryOperatorKind::Not.returns_boolean()); + assert!(UnaryOperatorKind::Minus.returns_number()); + assert!(UnaryOperatorKind::Not.is_logical_not()); + } + + #[test] + fn semantic_token_classification_covers_all_lexical_tokens() { + let mut missing = Vec::new(); + let mut conflict = Vec::new(); + + for kind in all_lexical_token_kinds() { + let classified = kind.semantic_token_class().is_some() || kind == SyntaxKind::IDENT; + let ignored = SEMANTIC_TOKEN_EXPLICITLY_IGNORED.contains(&kind); + match (classified, ignored) { + (false, false) => missing.push(kind), + (true, true) => conflict.push(kind), + _ => {} + } + } + + assert!( + conflict.is_empty(), + "semantic-token kinds cannot be both classified and ignored: {conflict:?}" + ); + assert!( + missing.is_empty(), + "lexical token kinds missing semantic classification decision: {missing:?}" + ); + } + + #[test] + fn binary_operator_classification_covers_all_variants() { + let mut missing = Vec::new(); + let mut conflict = Vec::new(); + + for op in [ + BinaryOperatorKind::Or, + BinaryOperatorKind::NullCoaelse, + BinaryOperatorKind::And, + BinaryOperatorKind::BitOr, + BinaryOperatorKind::BitXor, + BinaryOperatorKind::BitAnd, + BinaryOperatorKind::Eq, + BinaryOperatorKind::Ne, + BinaryOperatorKind::Lt, + BinaryOperatorKind::Gt, + BinaryOperatorKind::Le, + BinaryOperatorKind::Ge, + BinaryOperatorKind::InKw, + BinaryOperatorKind::Lhs, + BinaryOperatorKind::Rhs, + BinaryOperatorKind::Plus, + BinaryOperatorKind::Minus, + BinaryOperatorKind::Mul, + BinaryOperatorKind::Div, + BinaryOperatorKind::Modulo, + BinaryOperatorKind::MetaObjectApply, + BinaryOperatorKind::ErrorNoOperator, + ] { + let classified = op.class().is_some(); + let ignored = BINARY_OPERATOR_EXPLICITLY_IGNORED.contains(&op); + match 
(classified, ignored) { + (false, false) => missing.push(op), + (true, true) => conflict.push(op), + _ => {} + } + } + + assert!( + conflict.is_empty(), + "binary operators cannot be both classified and ignored: {conflict:?}" + ); + assert!( + missing.is_empty(), + "binary operators missing classification decision: {missing:?}" + ); + } + + #[test] + fn token_docs_are_consistent_and_examples_parse() { + let state = State::default(); + for metadata in TOKEN_SEMANTICS { + if metadata.hover_purposes.is_empty() { + continue; + } + assert!( + metadata.hover_eligible, + "token {:?} has hover docs but is not hover-eligible", + metadata.kind + ); + + for purpose in metadata.hover_purposes { + assert!( + !purpose.doc.trim().is_empty(), + "token {:?} has an empty doc purpose", + metadata.kind + ); + assert!( + !purpose.example.trim().is_empty(), + "token {:?} has an empty example purpose", + metadata.kind + ); + + let (source, errors) = crate::parse(purpose.example); + assert!( + errors.is_empty(), + "token {:?} example did not parse cleanly: {:?}, errors: {errors:?}", + metadata.kind, + purpose.example + ); + + let contains_token = source + .syntax + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .any(|token| token.kind() == metadata.kind); + assert!( + contains_token, + "token {:?} example does not contain the token: {:?}", + metadata.kind, purpose.example + ); + + let value = state + .evaluate_snippet("", purpose.example) + .unwrap_or_else(|err| { + panic!( + "token {:?} example did not evaluate cleanly: {:?}, error: {err:#}", + metadata.kind, purpose.example + ) + }); + assert!( + value_matches_outcome(&value, purpose.outcome), + "token {:?} example had unexpected outcome {:?}: expected {:?}, got {:?}", + metadata.kind, + purpose.example, + purpose.outcome, + value + ); + } + } + } + + #[test] + fn token_semantics_table_has_unique_kinds() { + let mut seen = HashSet::new(); + let mut duplicates = Vec::new(); + for metadata in TOKEN_SEMANTICS { + if 
!seen.insert(metadata.kind) { + duplicates.push(metadata.kind); + } + } + assert!( + duplicates.is_empty(), + "token semantics table contains duplicate entries: {duplicates:?}" + ); + } + + #[test] + fn token_semantics_operator_flags_are_consistent() { + let mut binary_without_operator_class = Vec::new(); + let mut unary_without_operator_class = Vec::new(); + let mut hover_docs_on_non_eligible = Vec::new(); + for metadata in TOKEN_SEMANTICS { + if metadata.binary_op_class.is_some() + && !matches!( + metadata.semantic_class, + Some(SemanticTokenClass::Operator | SemanticTokenClass::Keyword) + ) { + binary_without_operator_class.push(metadata.kind); + } + if metadata.unary_op_class.is_some() + && metadata.semantic_class != Some(SemanticTokenClass::Operator) + { + unary_without_operator_class.push(metadata.kind); + } + if !metadata.hover_purposes.is_empty() && !metadata.hover_eligible { + hover_docs_on_non_eligible.push(metadata.kind); + } + } + + assert!( + binary_without_operator_class.is_empty(), + "binary-op token semantics must be operator-classified: {binary_without_operator_class:?}" + ); + assert!( + unary_without_operator_class.is_empty(), + "unary-op token semantics must be operator-classified: {unary_without_operator_class:?}" + ); + assert!( + hover_docs_on_non_eligible.is_empty(), + "tokens with hover docs must be hover-eligible: {hover_docs_on_non_eligible:?}" + ); + } + + #[test] + fn semantic_token_class_matches_helper_methods() { + for kind in all_lexical_token_kinds() { + let class = kind.semantic_token_class(); + assert_eq!( + kind.is_semantic_keyword_token(), + class == Some(SemanticTokenClass::Keyword) + ); + assert_eq!( + kind.is_semantic_comment_token(), + class == Some(SemanticTokenClass::Comment) + ); + assert_eq!( + kind.is_semantic_string_token(), + class == Some(SemanticTokenClass::String) + ); + assert_eq!( + kind.is_semantic_number_token(), + class == Some(SemanticTokenClass::Number) + ); + assert_eq!( + kind.is_semantic_operator_token(), 
+ class == Some(SemanticTokenClass::Operator) + ); + } + } +} diff --git a/crates/jrsonnet-rowan-parser/src/tests.rs b/crates/jrsonnet-rowan-parser/src/tests.rs index 34181c1b..77431b36 100644 --- a/crates/jrsonnet-rowan-parser/src/tests.rs +++ b/crates/jrsonnet-rowan-parser/src/tests.rs @@ -1,4 +1,4 @@ -// `never` +// `never` - tests disabled due to missing deps (miette, jrsonnet-stdlib) #![cfg(any())] use miette::{ diff --git a/crates/jrsonnet-rowan-parser/src/token_set.rs b/crates/jrsonnet-rowan-parser/src/token_set.rs index 24055fc4..9f5cb437 100644 --- a/crates/jrsonnet-rowan-parser/src/token_set.rs +++ b/crates/jrsonnet-rowan-parser/src/token_set.rs @@ -2,7 +2,7 @@ use std::fmt; use crate::SyntaxKind; -#[derive(Clone, Copy, Default)] +#[derive(Clone, Copy, Default, PartialEq, Eq)] pub struct SyntaxKindSet(u128); impl SyntaxKindSet { diff --git a/crates/jrsonnet-std-sig/Cargo.toml b/crates/jrsonnet-std-sig/Cargo.toml new file mode 100644 index 00000000..2a8f202a --- /dev/null +++ b/crates/jrsonnet-std-sig/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "jrsonnet-std-sig" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Jsonnet stdlib function specifications for LSP" + +[dependencies] + +[dev-dependencies] +jrsonnet-rowan-parser.workspace = true + +[lints] +workspace = true diff --git a/crates/jrsonnet-std-sig/data/jrsonnet_legacy_functions.txt b/crates/jrsonnet-std-sig/data/jrsonnet_legacy_functions.txt new file mode 100644 index 00000000..aab563fc --- /dev/null +++ b/crates/jrsonnet-std-sig/data/jrsonnet_legacy_functions.txt @@ -0,0 +1,150 @@ +abs +acos +all +any +asciiLower +asciiUpper +asin +assertEqual +atan +avg +base64 +base64Decode +base64DecodeBytes +bigint +ceil +char +clamp +codepoint +contains +cos +count +decodeUTF8 +deepJoin +encodeUTF8 +endsWith +equals +equalsIgnoreCase +escapeStringBash +escapeStringDollars +escapeStringJson +escapeStringPython 
+escapeStringXML +escapeStringXml +exp +exponent +extVar +filter +filterMap +find +findSubstr +flatMap +flatten +flattenArrays +flattenDeepArray +floor +foldl +foldr +format +get +isArray +isBoolean +isDecimal +isEmpty +isEven +isFunction +isInteger +isNumber +isObject +isOdd +isString +join +length +lines +log +lstripChars +makeArray +manifestIni +manifestJson +manifestJsonEx +manifestJsonMinified +manifestPython +manifestPythonVars +manifestToml +manifestTomlEx +manifestXmlJsonml +manifestYamlDoc +manifestYamlStream +mantissa +map +mapWithIndex +mapWithKey +max +maxArray +md5 +member +mergePatch +min +minArray +mod +modulo +native +objectFields +objectFieldsAll +objectFieldsEx +objectHas +objectHasAll +objectHasEx +objectKeysValues +objectKeysValuesAll +objectRemoveKey +objectValues +objectValuesAll +parseHex +parseInt +parseJson +parseOctal +parseYaml +pow +primitiveEquals +prune +range +regexQuoteMeta +remove +removeAt +repeat +resolvePath +reverse +round +rstripChars +set +setDiff +setInter +setMember +setUnion +sha1 +sha256 +sha3 +sha512 +sign +sin +slice +sort +split +splitLimit +splitLimitR +sqrt +startsWith +strReplace +stringChars +stripChars +substr +sum +tan +thisFile +toString +trace +type +uniq +xnor +xor diff --git a/crates/jrsonnet-std-sig/data/official_functions.txt b/crates/jrsonnet-std-sig/data/official_functions.txt new file mode 100644 index 00000000..edc5f38d --- /dev/null +++ b/crates/jrsonnet-std-sig/data/official_functions.txt @@ -0,0 +1,145 @@ +abs +acos +all +any +asciiLower +asciiUpper +asin +assertEqual +atan +atan2 +avg +base64 +base64Decode +base64DecodeBytes +ceil +char +clamp +codepoint +contains +cos +count +decodeUTF8 +deepJoin +deg2rad +encodeUTF8 +endsWith +equalsIgnoreCase +escapeStringBash +escapeStringDollars +escapeStringJson +escapeStringPython +escapeStringXml +exp +exponent +extVar +filter +filterMap +find +findSubstr +flatMap +flattenArrays +flattenDeepArray +floor +foldl +foldr +format +get +hypot +isArray +isBoolean 
+isDecimal +isEmpty +isEven +isFunction +isInteger +isNull +isNumber +isObject +isOdd +isString +join +length +lines +log +log10 +log2 +lstripChars +makeArray +manifestIni +manifestJson +manifestJsonEx +manifestJsonMinified +manifestPython +manifestPythonVars +manifestTomlEx +manifestXmlJsonml +manifestYamlDoc +manifestYamlStream +mantissa +map +mapWithIndex +mapWithKey +max +maxArray +md5 +member +mergePatch +min +minArray +mod +objectFields +objectFieldsAll +objectHas +objectHasAll +objectKeysValues +objectKeysValuesAll +objectRemoveKey +objectValues +objectValuesAll +parseHex +parseInt +parseJson +parseOctal +parseYaml +pow +prune +rad2deg +range +remove +removeAt +repeat +reverse +round +rstripChars +set +setDiff +setInter +setMember +setUnion +sha1 +sha256 +sha3 +sha512 +sign +sin +slice +sort +split +splitLimit +splitLimitR +sqrt +startsWith +strReplace +stringChars +stripChars +substr +sum +tan +toString +trace +trim +type +uniq +xnor +xor diff --git a/crates/jrsonnet-std-sig/data/rjsonnet_functions.txt b/crates/jrsonnet-std-sig/data/rjsonnet_functions.txt new file mode 100644 index 00000000..18ee0d02 --- /dev/null +++ b/crates/jrsonnet-std-sig/data/rjsonnet_functions.txt @@ -0,0 +1,147 @@ +abs +acos +all +any +asciiLower +asciiUpper +asin +assertEqual +atan +atan2 +avg +base64 +base64Decode +base64DecodeBytes +ceil +char +clamp +codepoint +contains +cos +count +decodeUTF8 +deepJoin +deg2rad +encodeUTF8 +endsWith +equals +equalsIgnoreCase +escapeStringBash +escapeStringDollars +escapeStringJson +escapeStringPython +escapeStringXml +exp +exponent +extVar +filter +filterMap +find +findSubstr +flatMap +flattenArrays +flattenDeepArray +floor +foldl +foldr +format +get +hypot +isArray +isBoolean +isDecimal +isEmpty +isEven +isFunction +isInteger +isNumber +isObject +isOdd +isString +join +length +lines +log +log10 +log2 +lstripChars +makeArray +manifestIni +manifestJson +manifestJsonEx +manifestJsonMinified +manifestPython +manifestPythonVars +manifestTomlEx 
+manifestXmlJsonml +manifestYamlDoc +manifestYamlStream +mantissa +map +mapWithIndex +mapWithKey +max +maxArray +md5 +member +mergePatch +min +minArray +native +objectFields +objectFieldsAll +objectHas +objectHasAll +objectHasEx +objectKeysValues +objectKeysValuesAll +objectRemoveKey +objectValues +objectValuesAll +parseHex +parseInt +parseJson +parseOctal +parseYaml +pi +pow +prune +rad2deg +range +remove +removeAt +repeat +reverse +round +rstripChars +set +setDiff +setInter +setMember +setUnion +sha1 +sha256 +sha3 +sha512 +sign +sin +slice +sort +split +splitLimit +splitLimitR +sqrt +startsWith +strReplace +stringChars +stripChars +substr +sum +tan +thisFile +toString +trace +trim +uniq +xnor +xor diff --git a/crates/jrsonnet-std-sig/src/lib.rs b/crates/jrsonnet-std-sig/src/lib.rs new file mode 100644 index 00000000..351bb376 --- /dev/null +++ b/crates/jrsonnet-std-sig/src/lib.rs @@ -0,0 +1,1886 @@ +//! Jsonnet standard library function specifications. +//! +//! This crate provides a single source of truth for all stdlib function metadata: +//! - Parameter names and types +//! - Return types +//! - Documentation +//! - Flow typing information (for type narrowing in conditionals) +//! +//! This data is used by: +//! - `jrsonnet-lsp-stdlib` for type signatures and documentation +//! - `jrsonnet-lsp-inference` for flow-sensitive type narrowing + +/// Parameter type specification. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ParamType { + /// Any type + Any, + /// Null type + Null, + /// Boolean type + Bool, + /// Number type + Number, + /// String type + String, + /// Character type (single-char string) + Char, + /// Array of any elements + Array, + /// Array of numbers + ArrayNumber, + /// Array of strings + ArrayString, + /// Array of characters + ArrayChar, + /// Array of booleans + ArrayBool, + /// Object type + Object, + /// Function type + Function, + /// String or array + StringOrArray, + /// String, array, object, or function (for std.length) + Lengthable, +} + +/// Return type specification. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ReturnSpec { + /// Fixed return type + Fixed(ParamType), + /// Same type as argument at given index + SameAsArg(usize), + /// Non-negative number (length result) + NonNegative, + /// Array with element type from function return at param index + ArrayOfFuncReturn(usize), + /// Array with same element type as input array at param index + ArrayWithSameElements(usize), + /// Set (sorted unique) with same element type as input at param index + SetWithSameElements(usize), + /// Array of values from object at param index + ObjectValuesType(usize), + /// Flattened result of map function at param index + FlatMapResult(usize), + /// Any type (unknown) + Any, +} + +impl Default for ReturnSpec { + fn default() -> Self { + Self::Any + } +} + +/// Parameter definition. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct Param { + /// Parameter name + pub name: &'static str, + /// Parameter type + pub ty: ParamType, + /// Whether this parameter has a default value + pub has_default: bool, +} + +impl Param { + /// Create a required parameter. + #[must_use] + pub const fn req(name: &'static str, ty: ParamType) -> Self { + Self { + name, + ty, + has_default: false, + } + } + + /// Create an optional parameter. 
+ #[must_use] + pub const fn opt(name: &'static str, ty: ParamType) -> Self { + Self { + name, + ty, + has_default: true, + } + } +} + +/// Totality of a flow typing predicate. +/// +/// Determines whether the negation of the predicate provides useful type information. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Totality { + /// Total predicates can be negated. + /// Example: if `std.isNumber(x)` is false, then x is definitely NOT a number. + Total, + /// Partial predicates cannot be negated. + /// Example: if `std.isInteger(x)` is false, x might still be a decimal number. + Partial, +} + +/// Type that a predicate narrows to. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum NarrowsTo { + Null, + Bool, + Number, + String, + Array, + Object, + Function, +} + +/// Flow typing information for type-guard functions. +/// +/// When a function like `std.isNumber(x)` returns true, we can narrow +/// the type of `x` to `Number` in the then-branch. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct FlowTyping { + /// Index of the parameter being narrowed (usually 0) + pub param_idx: usize, + /// Type that the parameter is narrowed to when predicate is true + pub narrows_to: NarrowsTo, + /// Whether the predicate is total (can negate) or partial (cannot) + pub totality: Totality, +} + +/// Complete specification for a stdlib function. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct StdFn { + /// Function name (without "std." 
prefix) + pub name: &'static str, + /// Parameter definitions + pub params: &'static [Param], + /// Return type specification + pub return_spec: ReturnSpec, + /// Whether function is variadic + pub variadic: bool, + /// Short documentation + pub doc: &'static str, + /// Example usage (optional) + pub example: Option<&'static str>, + /// Flow typing info (for type guards like std.isNumber) + pub flow_typing: Option<FlowTyping>, +} + +// Helper constants for common parameter types +const ANY: ParamType = ParamType::Any; +const NUM: ParamType = ParamType::Number; +const STR: ParamType = ParamType::String; +const BOOL: ParamType = ParamType::Bool; +const ARR: ParamType = ParamType::Array; +const ARR_NUM: ParamType = ParamType::ArrayNumber; +const ARR_STR: ParamType = ParamType::ArrayString; +const ARR_CHAR: ParamType = ParamType::ArrayChar; +const OBJ: ParamType = ParamType::Object; +const FUNC: ParamType = ParamType::Function; +const CHAR: ParamType = ParamType::Char; +const LEN: ParamType = ParamType::Lengthable; + +// Helper for creating flow typing info +const fn flow(narrows_to: NarrowsTo, totality: Totality) -> FlowTyping { + FlowTyping { + param_idx: 0, + narrows_to, + totality, + } +} + +/// All stdlib function specifications.
+pub static FNS: &[StdFn] = &[ + // ========================================================================== + // Type checking functions + // ========================================================================== + StdFn { + name: "type", + params: &[Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Returns the type of `x` as a string.", + example: Some(r#"std.type([1,2]) // "array""#), + flow_typing: None, + }, + StdFn { + name: "isNull", + params: &[Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `x` is null.", + example: Some("std.isNull(null) // true"), + flow_typing: Some(flow(NarrowsTo::Null, Totality::Total)), + }, + StdFn { + name: "isString", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a string.", + example: None, + flow_typing: Some(flow(NarrowsTo::String, Totality::Total)), + }, + StdFn { + name: "isNumber", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a number.", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Total)), + }, + StdFn { + name: "isBoolean", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a boolean.", + example: None, + flow_typing: Some(flow(NarrowsTo::Bool, Totality::Total)), + }, + StdFn { + name: "isObject", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an object.", + example: None, + flow_typing: Some(flow(NarrowsTo::Object, Totality::Total)), + }, + StdFn { + name: "isArray", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an array.", + example: None, + flow_typing: Some(flow(NarrowsTo::Array, Totality::Total)), + }, + 
StdFn { + name: "isFunction", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a function.", + example: None, + flow_typing: Some(flow(NarrowsTo::Function, Totality::Total)), + }, + StdFn { + name: "isEmpty", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if string, array, or object is empty.", + example: None, + flow_typing: None, + }, + StdFn { + name: "isInteger", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an integer.", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Partial)), + }, + StdFn { + name: "isDecimal", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a decimal (has fractional part).", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Partial)), + }, + StdFn { + name: "isEven", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an even integer.", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Partial)), + }, + StdFn { + name: "isOdd", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an odd integer.", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Partial)), + }, + // ========================================================================== + // Length and basic array functions + // ========================================================================== + StdFn { + name: "length", + params: &[Param::req("x", LEN)], + return_spec: ReturnSpec::NonNegative, + variadic: false, + doc: "Returns the length of an array, string, object, or function parameters.", + example: Some("std.length([1,2,3]) // 3"), + flow_typing: 
None, + }, + StdFn { + name: "makeArray", + params: &[Param::req("sz", NUM), Param::req("func", FUNC)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Creates an array of size `sz` by calling `func(i)` for each index.", + example: Some("std.makeArray(3, function(i) i * 2) // [0, 2, 4]"), + flow_typing: None, + }, + // ========================================================================== + // Higher-order array functions + // ========================================================================== + StdFn { + name: "map", + params: &[Param::req("func", FUNC), Param::req("arr", ARR)], + return_spec: ReturnSpec::ArrayOfFuncReturn(0), + variadic: false, + doc: "Applies `func` to each element of `arr`.", + example: Some("std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]"), + flow_typing: None, + }, + StdFn { + name: "mapWithIndex", + params: &[Param::req("func", FUNC), Param::req("arr", ARR)], + return_spec: ReturnSpec::ArrayOfFuncReturn(0), + variadic: false, + doc: "Like `map`, but `func` takes `(index, element)`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "mapWithKey", + params: &[Param::req("func", FUNC), Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(OBJ), + variadic: false, + doc: "Applies `func(key, value)` to each field of `obj`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "filter", + params: &[Param::req("func", FUNC), Param::req("arr", ARR)], + return_spec: ReturnSpec::ArrayWithSameElements(1), + variadic: false, + doc: "Returns elements of `arr` where `func(x)` is true.", + example: Some("std.filter(function(x) x > 1, [1,2,3]) // [2, 3]"), + flow_typing: None, + }, + StdFn { + name: "flatMap", + params: &[Param::req("func", FUNC), Param::req("arr", ARR)], + return_spec: ReturnSpec::FlatMapResult(0), + variadic: false, + doc: "Maps `func` over `arr` and flattens the result.", + example: None, + flow_typing: None, + }, + StdFn { + name: "filterMap", + params: &[ + 
Param::req("filter_func", FUNC), + Param::req("map_func", FUNC), + Param::req("arr", ARR), + ], + return_spec: ReturnSpec::ArrayOfFuncReturn(1), + variadic: false, + doc: "Filters then maps array elements.", + example: None, + flow_typing: None, + }, + StdFn { + name: "foldl", + params: &[ + Param::req("func", FUNC), + Param::req("arr", ARR), + Param::req("init", ANY), + ], + return_spec: ReturnSpec::SameAsArg(2), + variadic: false, + doc: "Left fold: `func(func(func(init, arr[0]), arr[1]), ...)`.", + example: Some("std.foldl(function(a, b) a + b, [1,2,3], 0) // 6"), + flow_typing: None, + }, + StdFn { + name: "foldr", + params: &[ + Param::req("func", FUNC), + Param::req("arr", ARR), + Param::req("init", ANY), + ], + return_spec: ReturnSpec::SameAsArg(2), + variadic: false, + doc: "Right fold: `func(arr[0], func(arr[1], ... init))`.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Array functions + // ========================================================================== + StdFn { + name: "range", + params: &[Param::req("from", NUM), Param::req("to", NUM)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Returns an array `[from, from+1, ..., to]`.", + example: Some("std.range(1, 5) // [1, 2, 3, 4, 5]"), + flow_typing: None, + }, + StdFn { + name: "slice", + params: &[ + Param::req("arr", ANY), + Param::req("index", NUM), + Param::req("end", NUM), + Param::opt("step", NUM), + ], + return_spec: ReturnSpec::ArrayWithSameElements(0), + variadic: false, + doc: "Slices array from `index` to `end` with `step`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "join", + params: &[Param::req("sep", STR), Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Joins array elements with separator.", + example: Some(r#"std.join(",", ["a","b","c"]) // "a,b,c""#), + flow_typing: None, + }, + StdFn { + name: "deepJoin", + 
params: &[Param::req("arr", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Recursively joins nested arrays into a string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "lines", + params: &[Param::req("arr", ARR_STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Joins array with newlines and adds trailing newline.", + example: None, + flow_typing: None, + }, + StdFn { + name: "reverse", + params: &[Param::req("arr", ANY)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "Reverses an array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "member", + params: &[Param::req("arr", ARR), Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `x` is in `arr`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "find", + params: &[Param::req("value", ANY), Param::req("arr", ANY)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Returns indices where `arr[i] == value`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "contains", + params: &[Param::req("arr", ANY), Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if array contains `x`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "count", + params: &[Param::req("arr", ANY), Param::req("x", ANY)], + return_spec: ReturnSpec::NonNegative, + variadic: false, + doc: "Counts occurrences of `x` in `arr`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "all", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if all elements are truthy.", + example: None, + flow_typing: None, + }, + StdFn { + name: "any", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if any element is truthy.", + example: None, + flow_typing: None, + }, 
+ StdFn { + name: "avg", + params: &[Param::req("arr", ARR_NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns average of numeric array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sort", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "Sorts array, optionally by key function.", + example: Some("std.sort([3,1,2]) // [1, 2, 3]"), + flow_typing: None, + }, + StdFn { + name: "uniq", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Removes consecutive duplicates.", + example: None, + flow_typing: None, + }, + StdFn { + name: "set", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Returns sorted unique elements.", + example: None, + flow_typing: None, + }, + StdFn { + name: "setUnion", + params: &[ + Param::req("a", ARR), + Param::req("b", ARR), + Param::opt("keyF", FUNC), + ], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Union of two sets.", + example: None, + flow_typing: None, + }, + StdFn { + name: "setInter", + params: &[ + Param::req("a", ARR), + Param::req("b", ARR), + Param::opt("keyF", FUNC), + ], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Intersection of two sets.", + example: None, + flow_typing: None, + }, + StdFn { + name: "setDiff", + params: &[ + Param::req("a", ARR), + Param::req("b", ARR), + Param::opt("keyF", FUNC), + ], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Difference of two sets (a - b).", + example: None, + flow_typing: None, + }, + StdFn { + name: "setMember", + params: &[ + Param::req("x", ANY), + Param::req("arr", ARR), + Param::opt("keyF", FUNC), + ], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true 
if `x` is in set.", + example: None, + flow_typing: None, + }, + StdFn { + name: "flatten", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Flattens nested arrays by one level.", + example: None, + flow_typing: None, + }, + StdFn { + name: "flattenArrays", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Alias for `flatten`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "flattenDeepArray", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Recursively flattens all nested arrays.", + example: None, + flow_typing: None, + }, + StdFn { + name: "remove", + params: &[Param::req("arr", ANY), Param::req("elem", ANY)], + return_spec: ReturnSpec::ArrayWithSameElements(0), + variadic: false, + doc: "Removes first occurrence of element from array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "removeAt", + params: &[Param::req("arr", ANY), Param::req("idx", NUM)], + return_spec: ReturnSpec::ArrayWithSameElements(0), + variadic: false, + doc: "Removes element at index from array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "repeat", + params: &[Param::req("arr", ANY), Param::req("n", NUM)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "Repeats array `n` times.", + example: None, + flow_typing: None, + }, + StdFn { + name: "prune", + params: &[Param::req("x", ANY)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "Recursively removes nulls, empty arrays, and empty objects.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // String functions + // ========================================================================== + StdFn { + name: "toString", + params: &[Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: 
"Converts value to string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "format", + params: &[Param::req("fmt", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: true, + doc: "Printf-style formatting.", + example: Some(r#"std.format("Hello %s", ["world"]) // "Hello world""#), + flow_typing: None, + }, + StdFn { + name: "escapeStringJson", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for JSON.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringBash", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for Bash.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringDollars", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes `$` characters.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringPython", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for Python.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringXml", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for XML.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringXML", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for XML (alias).", + example: None, + flow_typing: None, + }, + StdFn { + name: "substr", + params: &[ + Param::req("s", STR), + Param::req("from", NUM), + Param::req("len", NUM), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Returns substring.", + example: None, + flow_typing: None, + }, + StdFn { + name: "split", + params: &[Param::req("str", STR), Param::req("c", STR)], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + 
doc: "Splits string by delimiter.", + example: Some(r#"std.split("a,b,c", ",") // ["a", "b", "c"]"#), + flow_typing: None, + }, + StdFn { + name: "splitLimit", + params: &[ + Param::req("str", STR), + Param::req("c", STR), + Param::req("maxsplits", NUM), + ], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Splits with maximum splits.", + example: None, + flow_typing: None, + }, + StdFn { + name: "splitLimitR", + params: &[ + Param::req("str", STR), + Param::req("c", STR), + Param::req("maxsplits", NUM), + ], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Splits from right with maximum.", + example: None, + flow_typing: None, + }, + StdFn { + name: "strReplace", + params: &[ + Param::req("str", STR), + Param::req("from", STR), + Param::req("to", STR), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Replaces all occurrences.", + example: Some(r#"std.strReplace("foo", "o", "0") // "f00""#), + flow_typing: None, + }, + StdFn { + name: "stripChars", + params: &[Param::req("str", STR), Param::req("chars", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Strips characters from both ends.", + example: None, + flow_typing: None, + }, + StdFn { + name: "lstripChars", + params: &[Param::req("str", STR), Param::req("chars", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Strips characters from left.", + example: None, + flow_typing: None, + }, + StdFn { + name: "rstripChars", + params: &[Param::req("str", STR), Param::req("chars", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Strips characters from right.", + example: None, + flow_typing: None, + }, + StdFn { + name: "trim", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Trims leading and trailing whitespace.", + example: Some(r#"std.trim(" hello ") // "hello""#), + flow_typing: None, + }, + StdFn { + name: "asciiLower", + params: 
&[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to lowercase ASCII.", + example: None, + flow_typing: None, + }, + StdFn { + name: "asciiUpper", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to uppercase ASCII.", + example: None, + flow_typing: None, + }, + StdFn { + name: "char", + params: &[Param::req("n", NUM)], + return_spec: ReturnSpec::Fixed(CHAR), + variadic: false, + doc: "Returns character for codepoint.", + example: Some(r#"std.char(65) // "A""#), + flow_typing: None, + }, + StdFn { + name: "codepoint", + params: &[Param::req("c", CHAR)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns codepoint for character.", + example: Some(r#"std.codepoint("A") // 65"#), + flow_typing: None, + }, + StdFn { + name: "startsWith", + params: &[Param::req("str", STR), Param::req("prefix", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if string starts with prefix.", + example: None, + flow_typing: None, + }, + StdFn { + name: "endsWith", + params: &[Param::req("str", STR), Param::req("suffix", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if string ends with suffix.", + example: None, + flow_typing: None, + }, + StdFn { + name: "findSubstr", + params: &[Param::req("pat", STR), Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Returns indices where pattern matches.", + example: None, + flow_typing: None, + }, + StdFn { + name: "stringChars", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(ARR_CHAR), + variadic: false, + doc: "Returns array of single-character strings.", + example: None, + flow_typing: None, + }, + StdFn { + name: "equalsIgnoreCase", + params: &[Param::req("str1", STR), Param::req("str2", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: 
"Case-insensitive string equality.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseInt", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Parses integer from string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseOctal", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Parses octal integer.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseHex", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Parses hexadecimal integer.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseJson", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Parses JSON string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseYaml", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Parses YAML string.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Object functions + // ========================================================================== + StdFn { + name: "objectFields", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Returns array of field names.", + example: Some(r#"std.objectFields({a: 1, b: 2}) // ["a", "b"]"#), + flow_typing: None, + }, + StdFn { + name: "objectFieldsAll", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Returns all field names including hidden.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectHas", + params: &[Param::req("obj", OBJ), Param::req("f", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if object has field.", + example: None, + flow_typing: None, + }, + 
StdFn { + name: "objectHasAll", + params: &[Param::req("obj", OBJ), Param::req("f", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true including hidden fields.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectValues", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::ObjectValuesType(0), + variadic: false, + doc: "Returns array of field values.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectValuesAll", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::ObjectValuesType(0), + variadic: false, + doc: "Returns all values including hidden.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectKeysValues", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Returns array of `{key, value}` objects.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectKeysValuesAll", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Includes hidden fields.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectFieldsEx", + params: &[Param::req("obj", OBJ), Param::req("hidden", BOOL)], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Returns field names with hidden control.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectHasEx", + params: &[ + Param::req("obj", OBJ), + Param::req("f", STR), + Param::req("hidden", BOOL), + ], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Checks for field with hidden control.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectRemoveKey", + params: &[Param::req("obj", OBJ), Param::req("key", STR)], + return_spec: ReturnSpec::Fixed(OBJ), + variadic: false, + doc: "Returns object with key removed.", + example: None, + flow_typing: None, + }, + StdFn { + name: "get", + params: &[ + Param::req("obj", OBJ), + Param::req("f", 
STR), + Param::opt("default", ANY), + Param::opt("inc_hidden", BOOL), + ], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Gets field with default.", + example: None, + flow_typing: None, + }, + StdFn { + name: "equals", + params: &[Param::req("a", ANY), Param::req("b", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Deep equality comparison.", + example: None, + flow_typing: None, + }, + StdFn { + name: "primitiveEquals", + params: &[Param::req("a", ANY), Param::req("b", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Equality for primitives (non-recursive).", + example: None, + flow_typing: None, + }, + StdFn { + name: "mergePatch", + params: &[Param::req("target", ANY), Param::req("patch", ANY)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "JSON Merge Patch (RFC 7396).", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Math functions + // ========================================================================== + StdFn { + name: "abs", + params: &[Param::req("n", NUM)], + return_spec: ReturnSpec::NonNegative, + variadic: false, + doc: "Absolute value.", + example: None, + flow_typing: None, + }, + StdFn { + name: "pi", + params: &[], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "The mathematical constant pi.", + example: Some("std.pi // 3.141592653589793"), + flow_typing: None, + }, + StdFn { + name: "sign", + params: &[Param::req("n", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns -1, 0, or 1.", + example: None, + flow_typing: None, + }, + StdFn { + name: "max", + params: &[Param::req("a", NUM), Param::req("b", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns maximum.", + example: None, + flow_typing: None, + }, + StdFn { + name: "min", + params: &[Param::req("a", NUM), Param::req("b", NUM)], + return_spec: 
ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns minimum.", + example: None, + flow_typing: None, + }, + StdFn { + name: "clamp", + params: &[ + Param::req("x", NUM), + Param::req("minVal", NUM), + Param::req("maxVal", NUM), + ], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Clamps value to range.", + example: None, + flow_typing: None, + }, + StdFn { + name: "pow", + params: &[Param::req("x", NUM), Param::req("n", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns x to the power n.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sqrt", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Square root.", + example: None, + flow_typing: None, + }, + StdFn { + name: "exp", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "e to the power x.", + example: None, + flow_typing: None, + }, + StdFn { + name: "log", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Natural logarithm.", + example: None, + flow_typing: None, + }, + StdFn { + name: "log2", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Base-2 logarithm.", + example: Some("std.log2(8) // 3"), + flow_typing: None, + }, + StdFn { + name: "log10", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Base-10 logarithm.", + example: Some("std.log10(100) // 2"), + flow_typing: None, + }, + StdFn { + name: "sin", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Sine (radians).", + example: None, + flow_typing: None, + }, + StdFn { + name: "cos", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Cosine (radians).", + example: None, + flow_typing: None, + }, + StdFn { + name: "tan", + params: 
&[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Tangent (radians).", + example: None, + flow_typing: None, + }, + StdFn { + name: "asin", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Arc sine.", + example: None, + flow_typing: None, + }, + StdFn { + name: "acos", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Arc cosine.", + example: None, + flow_typing: None, + }, + StdFn { + name: "atan", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Arc tangent.", + example: None, + flow_typing: None, + }, + StdFn { + name: "atan2", + params: &[Param::req("y", NUM), Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Arc tangent of y/x using the signs of both arguments.", + example: Some("std.atan2(1, 1) // 0.7853981633974483"), + flow_typing: None, + }, + StdFn { + name: "hypot", + params: &[Param::req("a", NUM), Param::req("b", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Hypotenuse length sqrt(a*a + b*b).", + example: Some("std.hypot(3, 4) // 5"), + flow_typing: None, + }, + StdFn { + name: "deg2rad", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Converts degrees to radians.", + example: Some("std.deg2rad(180) // 3.141592653589793"), + flow_typing: None, + }, + StdFn { + name: "rad2deg", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Converts radians to degrees.", + example: Some("std.rad2deg(std.pi) // 180"), + flow_typing: None, + }, + StdFn { + name: "floor", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Floor.", + example: None, + flow_typing: None, + }, + StdFn { + name: "ceil", + params: &[Param::req("x", NUM)], + return_spec: 
ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Ceiling.", + example: None, + flow_typing: None, + }, + StdFn { + name: "round", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Round to nearest integer.", + example: None, + flow_typing: None, + }, + StdFn { + name: "modulo", + params: &[Param::req("x", NUM), Param::req("y", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Modulo operation.", + example: None, + flow_typing: None, + }, + StdFn { + name: "mod", + params: &[Param::req("a", NUM), Param::req("b", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Modulo (same as `a % b`).", + example: None, + flow_typing: None, + }, + StdFn { + name: "mantissa", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns mantissa of floating point number.", + example: None, + flow_typing: None, + }, + StdFn { + name: "exponent", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns exponent of floating point number.", + example: None, + flow_typing: None, + }, + StdFn { + name: "bigint", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts value to arbitrary-precision integer.", + example: None, + flow_typing: None, + }, + StdFn { + name: "xor", + params: &[Param::req("a", BOOL), Param::req("b", BOOL)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Logical XOR.", + example: None, + flow_typing: None, + }, + StdFn { + name: "xnor", + params: &[Param::req("a", BOOL), Param::req("b", BOOL)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Logical XNOR.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sum", + params: &[Param::req("arr", ARR_NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Sum of array.", + example: None, + flow_typing: 
None, + }, + StdFn { + name: "minArray", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Minimum of array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "maxArray", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Maximum of array.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Encoding/hashing + // ========================================================================== + StdFn { + name: "base64", + params: &[Param::req("input", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Base64 encodes string or bytes.", + example: None, + flow_typing: None, + }, + StdFn { + name: "base64Decode", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Base64 decodes to string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "base64DecodeBytes", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Base64 decodes to bytes.", + example: None, + flow_typing: None, + }, + StdFn { + name: "encodeUTF8", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Encodes string to UTF-8 byte array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "decodeUTF8", + params: &[Param::req("arr", ARR_NUM)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Decodes UTF-8 byte array to string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "md5", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "MD5 hash as hex string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sha1", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: 
false, + doc: "SHA-1 hash as hex string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sha256", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "SHA-256 hash as hex string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sha512", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "SHA-512 hash as hex string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sha3", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "SHA-3 hash as hex string.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Manifest functions + // ========================================================================== + StdFn { + name: "manifestJson", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts value to JSON string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestJsonEx", + params: &[ + Param::req("value", ANY), + Param::opt("indent", STR), + Param::opt("newline", STR), + Param::opt("key_val_sep", STR), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "JSON with custom formatting.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestJsonMinified", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Minified JSON.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestYamlDoc", + params: &[ + Param::req("value", ANY), + Param::opt("indent_array_in_object", BOOL), + Param::opt("quote_keys", BOOL), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to YAML.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestYamlStream", + params: &[ + Param::req("value", ANY), + 
Param::opt("indent_array_in_object", BOOL), + Param::opt("c_document_end", BOOL), + Param::opt("quote_keys", BOOL), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "YAML stream.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestPython", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to Python literal.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestPythonVars", + params: &[Param::req("conf", OBJ)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Python variable assignments.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestIni", + params: &[Param::req("ini", OBJ)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to INI format.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestToml", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to TOML.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestTomlEx", + params: &[Param::req("value", ANY), Param::req("indent", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to TOML with custom indent.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestXmlJsonml", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts JSONML to XML.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Regex + // ========================================================================== + StdFn { + name: "regexQuoteMeta", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes regex metacharacters in string.", + example: None, + flow_typing: None, + }, + // 
========================================================================== + // Path/import + // ========================================================================== + StdFn { + name: "resolvePath", + params: &[Param::req("from", STR), Param::req("to", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Resolves a relative path.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Miscellaneous + // ========================================================================== + StdFn { + name: "trace", + params: &[Param::req("str", STR), Param::req("rest", ANY)], + return_spec: ReturnSpec::SameAsArg(1), + variadic: false, + doc: "Prints `str` and returns `rest`.", + example: Some("std.trace(\"debug\", value)"), + flow_typing: None, + }, + StdFn { + name: "assertEqual", + params: &[Param::req("a", ANY), Param::req("b", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Asserts `a == b`, returns true.", + example: None, + flow_typing: None, + }, + StdFn { + name: "native", + params: &[Param::req("name", STR)], + return_spec: ReturnSpec::Fixed(FUNC), + variadic: false, + doc: "Calls a native extension function.", + example: None, + flow_typing: None, + }, + StdFn { + name: "extVar", + params: &[Param::req("name", STR)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Gets external variable value.", + example: None, + flow_typing: None, + }, + StdFn { + name: "thisFile", + params: &[], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Returns the current file path.", + example: None, + flow_typing: None, + }, +]; + +/// Look up a stdlib function by name. +#[must_use] +pub fn get_fn(name: &str) -> Option<&'static StdFn> { + FNS.iter().find(|f| f.name == name) +} + +/// Get flow typing info for a function by name. 
+#[must_use] +pub fn get_flow_typing(name: &str) -> Option<&'static FlowTyping> { + get_fn(name).and_then(|f| f.flow_typing.as_ref()) +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeSet; + + use jrsonnet_rowan_parser::parse; + + use super::*; + + const OFFICIAL_FUNCTIONS: &str = include_str!("../data/official_functions.txt"); + const RJSONNET_FUNCTIONS: &str = include_str!("../data/rjsonnet_functions.txt"); + const JRSONNET_LEGACY_FUNCTIONS: &str = include_str!("../data/jrsonnet_legacy_functions.txt"); + + fn actual_function_names() -> BTreeSet { + FNS.iter().map(|f| f.name.to_string()).collect() + } + + fn parse_name_set(raw: &str) -> BTreeSet { + raw.lines() + .map(str::trim) + .filter(|line| !line.is_empty()) + .map(str::to_string) + .collect() + } + + fn missing_names(actual: &BTreeSet, expected: &BTreeSet) -> Vec { + expected.difference(actual).cloned().collect() + } + + #[test] + fn test_get_fn() { + let f = get_fn("isNumber").unwrap(); + let expected_params = [Param::req("v", ANY)]; + assert_eq!( + ( + f.name, + f.params, + f.return_spec, + f.variadic, + f.doc, + f.example, + f.flow_typing + ), + ( + "isNumber", + &expected_params[..], + ReturnSpec::Fixed(BOOL), + false, + "Returns true if `v` is a number.", + None, + Some(FlowTyping { + param_idx: 0, + narrows_to: NarrowsTo::Number, + totality: Totality::Total, + }) + ) + ); + } + + #[test] + fn test_get_flow_typing() { + let ft = get_flow_typing("isNumber").unwrap(); + assert_eq!(ft.narrows_to, NarrowsTo::Number); + assert_eq!(ft.totality, Totality::Total); + } + + #[test] + fn test_partial_predicate() { + let ft = get_flow_typing("isInteger").unwrap(); + assert_eq!(ft.narrows_to, NarrowsTo::Number); + assert_eq!(ft.totality, Totality::Partial); + } + + #[test] + fn test_no_flow_typing() { + assert!(get_flow_typing("length").is_none()); + } + + #[test] + fn test_all_fns_have_valid_params() { + for f in FNS { + // Check param names are non-empty + for p in f.params { + 
assert!(!p.name.is_empty(), "{} has empty param name", f.name); + } + } + } + + #[test] + fn test_all_fns_have_non_empty_docs() { + for f in FNS { + assert!( + !f.doc.trim().is_empty(), + "{} has an empty documentation string", + f.name + ); + } + } + + #[test] + fn test_examples_parse_cleanly() { + for f in FNS { + let Some(example) = f.example else { + continue; + }; + let (_, errors) = parse(example); + assert!( + errors.is_empty(), + "example for {} must parse cleanly; errors={errors:?}; example={example}", + f.name + ); + } + } + + #[test] + fn test_covers_official_stdlib_functions() { + let actual = actual_function_names(); + let official = parse_name_set(OFFICIAL_FUNCTIONS); + let missing = missing_names(&actual, &official); + assert_eq!(missing, Vec::::new()); + } + + #[test] + fn test_covers_rjsonnet_stdlib_functions() { + let actual = actual_function_names(); + let rjsonnet = parse_name_set(RJSONNET_FUNCTIONS); + let missing = missing_names(&actual, &rjsonnet); + assert_eq!(missing, Vec::::new()); + } + + #[test] + fn test_preserves_jrsonnet_legacy_stdlib_functions() { + let actual = actual_function_names(); + let legacy = parse_name_set(JRSONNET_LEGACY_FUNCTIONS); + let missing = missing_names(&actual, &legacy); + assert_eq!(missing, Vec::::new()); + } +} diff --git a/crates/jrsonnet-stdlib/src/manifest/yaml.rs b/crates/jrsonnet-stdlib/src/manifest/yaml.rs index c2f46bd5..7b9fe96c 100644 --- a/crates/jrsonnet-stdlib/src/manifest/yaml.rs +++ b/crates/jrsonnet-stdlib/src/manifest/yaml.rs @@ -3,7 +3,8 @@ use std::{borrow::Cow, fmt::Write}; use jrsonnet_evaluator::{ bail, in_description_frame, manifest::{escape_string_json_buf, ManifestFormat}, - Result, ResultExt, Val, + val::ArrValue, + ObjValue, Result, ResultExt, Val, }; pub struct YamlFormat<'s> { @@ -188,6 +189,158 @@ fn manifest_yaml_ex(val: &Val, options: &YamlFormat<'_>) -> Result { Ok(out) } +fn manifest_yaml_array_buf( + a: &ArrValue, + buf: &mut String, + cur_padding: &mut String, + options: 
&YamlFormat<'_>, +) -> Result<()> { + let mut had_items = false; + for (i, item) in a.iter().enumerate() { + had_items = true; + let item = item.with_description(|| format!("elem <{i}> evaluation"))?; + if i != 0 { + buf.push('\n'); + buf.push_str(cur_padding); + } + buf.push('-'); + match &item { + Val::Arr(a) if !a.is_empty() => { + // Nested arrays need a newline and extra indentation + buf.push('\n'); + buf.push_str(cur_padding); + buf.push_str(&options.padding); + } + _ => buf.push(' '), + } + // For nested arrays, add padding to cur_padding + let prev_len = cur_padding.len(); + if let Val::Arr(a) = &item { + if !a.is_empty() { + cur_padding.push_str(&options.padding); + } + } + // Objects in arrays need special handling: their fields should + // align with the first field (after "- "), but nested structures + // should not inherit this offset + let is_object_in_array = matches!(&item, Val::Obj(o) if !o.is_empty()); + in_description_frame( + || format!("elem <{i}> manifestification"), + || manifest_yaml_ex_buf(&item, buf, cur_padding, options, is_object_in_array), + )?; + cur_padding.truncate(prev_len); + } + if !had_items { + buf.push_str("[]"); + } + Ok(()) +} + +fn manifest_yaml_object_buf( + o: &ObjValue, + buf: &mut String, + cur_padding: &mut String, + options: &YamlFormat<'_>, + in_array_context: bool, +) -> Result<()> { + let mut had_fields = false; + // Store the base padding BEFORE any in_array_context adjustment. + let base_padding_len = cur_padding.len(); + + // For key alignment: if this object is an array element, keys (except the first) + // need 2 extra spaces to align with the first key (which appears after "- "). + // This offset is ONLY for key alignment, NOT for nested content. 
+ let key_padding = if in_array_context { + let mut kp = cur_padding.clone(); + kp.push_str(" "); + kp + } else { + cur_padding.clone() + }; + + for (i, (key, value)) in o + .iter( + #[cfg(feature = "exp-preserve-order")] + options.preserve_order, + ) + .enumerate() + { + had_fields = true; + let value = value.with_description(|| format!("field <{key}> evaluation"))?; + if i != 0 { + buf.push('\n'); + buf.push_str(&key_padding); + } + if !options.quote_keys && !yaml_needs_quotes(&key) { + buf.push_str(&key); + } else { + escape_string_json_buf(&key, buf); + } + buf.push(':'); + + // For nested content (arrays/objects as values), we need to account for + // whether this object is an array element. If so, the first field starts + // at cur_padding + 2 (after "- "), so nested content should be relative + // to that position. + // + // When in_array_context, we add +2 to account for the "- " prefix, but we + // DON'T add arr_element_padding for arrays - the +2 offset already provides + // the correct indentation. For non-array context, we DO add arr_element_padding. + let content_base = if in_array_context { + let mut base = cur_padding.clone(); + base.push_str(" "); + base + } else { + cur_padding.clone() + }; + + let prev_len = cur_padding.len(); + match &value { + Val::Arr(a) if !a.is_empty() => { + buf.push('\n'); + // For arrays in object fields, use content_base (which includes the + // in_array_context alignment) plus arr_element_padding. 
+ buf.push_str(&content_base); + buf.push_str(&options.arr_element_padding); + // Set cur_padding for nested content + cur_padding.clear(); + cur_padding.push_str(&content_base); + cur_padding.push_str(&options.arr_element_padding); + } + Val::Obj(o) if !o.is_empty() => { + buf.push('\n'); + buf.push_str(&content_base); + buf.push_str(&options.padding); + // Set cur_padding for nested content + cur_padding.clear(); + cur_padding.push_str(&content_base); + cur_padding.push_str(&options.padding); + } + _ => { + buf.push(' '); + // Set cur_padding for block scalar indentation in array context + // This ensures block scalar content is indented relative to key position + if in_array_context { + cur_padding.clear(); + cur_padding.push_str(&content_base); + } + } + } + in_description_frame( + || format!("field <{key}> manifestification"), + || manifest_yaml_ex_buf(&value, buf, cur_padding, options, false), + )?; + cur_padding.truncate(prev_len); + } + // Restore cur_padding to original value + cur_padding.truncate(base_padding_len); + if !had_fields { + buf.push_str("{}"); + } + + Ok(()) +} + #[allow(clippy::too_many_lines)] fn manifest_yaml_ex_buf( val: &Val, @@ -256,142 +409,8 @@ fn manifest_yaml_ex_buf( } #[cfg(feature = "exp-bigint")] Val::BigInt(n) => write!(buf, "{}", *n).unwrap(), - Val::Arr(a) => { - let mut had_items = false; - for (i, item) in a.iter().enumerate() { - had_items = true; - let item = item.with_description(|| format!("elem <{i}> evaluation"))?; - if i != 0 { - buf.push('\n'); - buf.push_str(cur_padding); - } - buf.push('-'); - match &item { - Val::Arr(a) if !a.is_empty() => { - // Nested arrays need a newline and extra indentation - buf.push('\n'); - buf.push_str(cur_padding); - buf.push_str(&options.padding); - } - _ => buf.push(' '), - } - // For nested arrays, add padding to cur_padding - let prev_len = cur_padding.len(); - if let Val::Arr(a) = &item { - if !a.is_empty() { - cur_padding.push_str(&options.padding); - } - } - // Objects in arrays 
need special handling: their fields should - // align with the first field (after "- "), but nested structures - // should not inherit this offset - let is_object_in_array = matches!(&item, Val::Obj(o) if !o.is_empty()); - in_description_frame( - || format!("elem <{i}> manifestification"), - || manifest_yaml_ex_buf(&item, buf, cur_padding, options, is_object_in_array), - )?; - cur_padding.truncate(prev_len); - } - if !had_items { - buf.push_str("[]"); - } - } - Val::Obj(o) => { - let mut had_fields = false; - // Store the base padding BEFORE any in_array_context adjustment. - let base_padding_len = cur_padding.len(); - - // For key alignment: if this object is an array element, keys (except the first) - // need 2 extra spaces to align with the first key (which appears after "- "). - // This offset is ONLY for key alignment, NOT for nested content. - let key_padding = if in_array_context { - let mut kp = cur_padding.clone(); - kp.push_str(" "); - kp - } else { - cur_padding.clone() - }; - - for (i, (key, value)) in o - .iter( - #[cfg(feature = "exp-preserve-order")] - options.preserve_order, - ) - .enumerate() - { - had_fields = true; - let value = value.with_description(|| format!("field <{key}> evaluation"))?; - if i != 0 { - buf.push('\n'); - buf.push_str(&key_padding); - } - if !options.quote_keys && !yaml_needs_quotes(&key) { - buf.push_str(&key); - } else { - escape_string_json_buf(&key, buf); - } - buf.push(':'); - - // For nested content (arrays/objects as values), we need to account for - // whether this object is an array element. If so, the first field starts - // at cur_padding + 2 (after "- "), so nested content should be relative - // to that position. - // - // When in_array_context, we add +2 to account for the "- " prefix, but we - // DON'T add arr_element_padding for arrays - the +2 offset already provides - // the correct indentation. For non-array context, we DO add arr_element_padding. 
- let content_base = if in_array_context { - let mut base = cur_padding.clone(); - base.push_str(" "); - base - } else { - cur_padding.clone() - }; - - let prev_len = cur_padding.len(); - match &value { - Val::Arr(a) if !a.is_empty() => { - buf.push('\n'); - // For arrays in object fields, use content_base (which includes the - // in_array_context alignment) plus arr_element_padding. - buf.push_str(&content_base); - buf.push_str(&options.arr_element_padding); - // Set cur_padding for nested content - cur_padding.clear(); - cur_padding.push_str(&content_base); - cur_padding.push_str(&options.arr_element_padding); - } - Val::Obj(o) if !o.is_empty() => { - buf.push('\n'); - buf.push_str(&content_base); - buf.push_str(&options.padding); - // Set cur_padding for nested content - cur_padding.clear(); - cur_padding.push_str(&content_base); - cur_padding.push_str(&options.padding); - } - _ => { - buf.push(' '); - // Set cur_padding for block scalar indentation in array context - // This ensures block scalar content is indented relative to key position - if in_array_context { - cur_padding.clear(); - cur_padding.push_str(&content_base); - } - } - } - in_description_frame( - || format!("field <{key}> manifestification"), - || manifest_yaml_ex_buf(&value, buf, cur_padding, options, false), - )?; - cur_padding.truncate(prev_len); - } - // Restore cur_padding to original value - cur_padding.truncate(base_padding_len); - if !had_fields { - buf.push_str("{}"); - } - } + Val::Arr(a) => manifest_yaml_array_buf(a, buf, cur_padding, options)?, + Val::Obj(o) => manifest_yaml_object_buf(o, buf, cur_padding, options, in_array_context)?, Val::Func(_) => bail!("tried to manifest function"), } Ok(()) diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md new file mode 100644 index 00000000..f003b7b9 --- /dev/null +++ b/docs/lsp/ARCHITECTURE.md @@ -0,0 +1,492 @@ +# jrsonnet-lsp Architecture + +This document describes the current architecture of the Jsonnet language server 
+implemented by `crates/jrsonnet-lsp` and companion crates. + +## Scope + +The LSP implementation is split across multiple crates. At a high level: + +- `jrsonnet-lsp` owns process lifecycle, LSP transport, request/notification + routing, configuration updates, and async diagnostics orchestration. +- `jrsonnet-lsp-handlers` owns most request handlers (hover, completion, + references, rename, and so on). +- `jrsonnet-lsp-inference` owns `TypeAnalysis`, cross-file type caching, and + dependency-aware analysis via `TypeProvider`. +- `jrsonnet-lsp-import` owns import parsing and dependency graph maintenance. +- `jrsonnet-lsp-check` owns lint and type-check diagnostics. +- `jrsonnet-lsp-document`, `jrsonnet-lsp-scope`, `jrsonnet-lsp-stdlib`, and + `jrsonnet-lsp-types` provide foundational utilities used throughout. + +## Crate Graph + +The rough dependency direction is: + +```text +jrsonnet-lsp-types + ├─ jrsonnet-lsp-stdlib + ├─ jrsonnet-lsp-document + ├─ jrsonnet-lsp-import + ├─ jrsonnet-lsp-scope + └─ jrsonnet-lsp-inference + └─ jrsonnet-lsp-check + └─ jrsonnet-lsp-handlers + └─ jrsonnet-lsp +``` + +`jrsonnet-lsp` also depends on `jrsonnet-evaluator` for evaluation-based runtime +diagnostics and command execution. + +## Runtime Components + +`Server` in `crates/jrsonnet-lsp/src/server.rs` owns shared state: + +- `connection: lsp_server::Connection` +- `documents: SharedDocumentManager` +- `import_graph: Arc>` +- `global_types: Arc` +- `type_cache: SharedTypeCache` +- `config: SharedConfig` +- `evaluator: Option>` +- `diagnostics: AsyncDiagnostics` +- `inflight_requests: InflightRequests` +- async request response channels +- shutdown flag + +`DocumentManager` keeps open documents in a concurrent map and keeps recently +closed documents in an LRU cache. It can also read unopened files from disk when +needed by cross-file operations. + +## Initialization + +Startup flow: + +1. Accept `initialize`. +2. Parse `initializationOptions` into `ServerConfig`. +3. 
Configure runtime components (`Evaluator`, `AsyncDiagnostics`).
+4. Send `InitializeResult` with server capabilities.
+5. Wait for `initialized` notification.
+6. If client supports dynamic watched-files registration, send
+   `client/registerCapability` for `workspace/didChangeWatchedFiles` with
+   `**/*.jsonnet`, `**/*.libsonnet`, and `**/*.json` watchers. When the client
+   also supports relative patterns, watchers are registered relative to
+   workspace roots.
+7. Schedule background bootstrap indexing for the workspace roots provided at
+   initialize (`workspaceFolders`, `rootUri`, `rootPath`) scanning
+   `*.jsonnet`, `*.libsonnet`, and `*.json`.
+8. Persist normalized workspace roots for runtime features that need
+   workspace-scoped execution context (for example formatting contract mode).
+9. Enter the main loop immediately while bootstrap continues asynchronously.
+
+Entry point: `run_stdio()` in `crates/jrsonnet-lsp/src/server.rs`.
+
+## Main Loop Model
+
+The main loop multiplexes three channels with `crossbeam_channel::select!`:
+
+- LSP transport messages.
+- Completed async diagnostics results.
+- Completed async request responses.
+
+This keeps the message loop responsive while expensive analysis work runs off
+thread.
+ +## Request Routing Model + +### Synchronous request handlers + +Handled directly on the server thread: + +- `textDocument/documentSymbol` +- `textDocument/documentHighlight` +- `textDocument/codeAction` +- `textDocument/signatureHelp` +- `textDocument/prepareRename` +- `codeLens/resolve` +- `shutdown` + +### Asynchronous request handlers + +Dispatched via `spawn_async_response` (Rayon): + +- `textDocument/definition` +- `textDocument/declaration` +- `textDocument/implementation` +- `textDocument/hover` +- `textDocument/inlayHint` +- `textDocument/completion` +- `textDocument/references` +- `workspace/symbol` +- `textDocument/rename` +- `textDocument/codeLens` +- `textDocument/formatting` +- `textDocument/rangeFormatting` +- `textDocument/semanticTokens/full` +- `textDocument/semanticTokens/range` +- `workspace/executeCommand` + +Async handlers run through `AsyncRequestContext` +(`crates/jrsonnet-lsp/src/server/async_requests.rs`), which centralizes access +to documents, import graph, type cache, config, and dependency-aware analysis. +`workspace/symbol` uses this context to search tracked workspace files from the +import graph, not just currently open buffers. Results are ranked by match +quality (exact, then prefix, then substring) and capped to 128 entries. + +Both sync and async request routing are organized one file per handler under: + +- `crates/jrsonnet-lsp/src/server/requests/sync_handlers/` +- `crates/jrsonnet-lsp/src/server/requests/async_handlers/` + +`AsyncRequestContext` implementation details are split one file per feature +under `crates/jrsonnet-lsp/src/server/async_requests/`. + +### In-Flight Request Boundary + +`Server` routes request lifecycle through +`crates/jrsonnet-lsp/src/protocol/inflight_requests.rs`. 
+ +This layer: + +- registers incoming request IDs +- enforces typed request handling via `begin::()` +- sends typed success/error responses via `send_ok` and `send_err` +- has an explicit unknown-method fallback (`begin_unknown`, `send_unknown_err`) +- supports cancellation of pending requests with `cancel_request` + (`RequestCanceled` error) +- accepts async worker responses only for currently pending request IDs + (`send_inflight_response`) +- tracks outgoing request metadata so response errors can be logged with method + context + +The main loop uses this boundary for all request responses after initialization. + +## Advertised LSP Capabilities + +`server_capabilities()` currently advertises: + +- incremental text sync with open/close and save notifications +- definition, declaration, implementation, type definition, hover, document + symbols, document highlights +- completion (trigger `.`) +- signature help (triggers `(` and `,`) +- formatting and range formatting +- references +- workspace symbol search +- rename with `prepareRename` +- semantic tokens (full document and range) +- code actions (quick-fix and source.fixAll kinds) +- code lens (resolve enabled) +- execute command (five command IDs) + +For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. + +## Notification Handling + +Implemented notifications: + +- `textDocument/didOpen` +- `textDocument/didChange` +- `textDocument/didSave` +- `textDocument/didClose` +- `workspace/didChangeConfiguration` +- `workspace/didChangeWatchedFiles` +- `$/cancelRequest` +- `exit` + +### Open/change path + +For open/change events, the server: + +1. Updates document contents/version. +2. Invalidates type cache for changed file and transitive importers. +3. Rebuilds import graph entries for that file. +4. Schedules diagnostics. + +### Close path + +For close events, the server: + +1. Moves the file from open docs to closed-cache storage. +2. Invalidates the file and dependent type cache entries. +3. 
Rebuilds import graph entries from on-disk content for the closed file. +4. Schedules diagnostics for currently open importers of that file. +5. Publishes empty diagnostics for the closed URI. + +### Save path + +For save events, the server: + +1. Optionally updates open-document contents when `didSave` includes `text`. +2. Invalidates type cache entries for the file and dependent importers. +3. Refreshes import graph entries for the saved file. +4. Schedules diagnostics for the saved file and currently open importers. + +### Watched file path + +For watched file events, the server invalidates cache and updates graph even for +unopened files. It also schedules diagnostics for open importers to refresh +cross-file diagnostics after external file changes. + +## Import Resolution Strategy + +Import resolution is used in multiple places (analysis, definition, commands). +The common resolution order is: + +1. relative to the importing file's directory +2. each configured `jpath` entry in order + +That ordering is applied through a shared boundary API: +`jrsonnet_lsp_import::ImportResolution`. Server graph updates, async +diagnostics, and custom eval operations all resolve and parse imports through +this single type so import behavior stays consistent across subsystems. + +Cross-file navigation and reference/rename paths then use the graph's resolved +entries as the source of truth instead of re-resolving import strings +independently. + +Import extraction for graph updates uses a two-stage strategy: + +1. AST-based import discovery for complete syntax (captures binding names when + available). +2. Token-stream fallback for syntax-broken files (for example unterminated + import strings), so unresolved-import diagnostics and dependency updates do + not silently miss imports while the user is mid-edit. + +Navigation semantics: + +- `textDocument/declaration`: nearest lexical declaration ("where this name is + introduced in the current scope"). 
+- `textDocument/definition`: canonical origin ("what this symbol resolves to + after following aliases/imports"). +- `textDocument/typeDefinition`: same target as `definition` in Jsonnet (symbols + do not have separate nominal type declarations). +- `textDocument/implementation`: value/body expression ("how this symbol is + computed"). + +In practice, declaration is usually the best jump for local editing, definition +is best for understanding provenance across aliases/imports, and implementation +is best for inspecting runtime value logic. + +Import graph update path: `update_import_graph` in +`crates/jrsonnet-lsp/src/server.rs`. + +`workspace/didChangeWatchedFiles` changes update the changed file immediately. +For create/delete events, the server also reindexes tracked files so unresolved +imports can transition to resolved (and vice versa) without requiring manual +file reopen. + +## Diagnostics Architecture + +Diagnostics run in a dedicated background worker (`AsyncDiagnostics` in +`crates/jrsonnet-lsp/src/async_diagnostics.rs`): + +- Requests are debounced per-file (`500 ms`). +- Stale requests/results are discarded by sequence numbers. +- Worker reconstructs `Document` from scheduled text/version. +- Worker parses import occurrences and resolves them against scheduled import + roots. +- Worker uses `TypeProvider` + `TypeCache` + `ImportGraph` for dependency-aware + analysis. +- Result is sent back as `PublishDiagnosticsParams`. + +Diagnostic composition (`crates/jrsonnet-lsp/src/handlers/diagnostics.rs`): + +1. Syntax diagnostics from parser errors. +2. Unresolved-import diagnostics from import occurrences (AST first with a + token-stream fallback for syntax-broken imports). +3. Lint/type diagnostics from `jrsonnet-lsp-check` when lint is enabled and + parse succeeded. +4. Evaluation diagnostic from `Evaluator` (optional) when parse succeeded. 
+ +Evaluation diagnostics use `analysis/eval.rs` and can optionally apply +Tanka-aware `jpath` expansion via `analysis/tanka.rs`. + +## Cross-file Analysis and Caching + +`TypeProvider` in `jrsonnet-lsp-inference` ensures imports are analyzed first: + +- Uses import graph dependency ordering (`process_with_dependencies`). +- Resolves import maps only for files in the transitive dependency closure of + the requested root (not the full workspace graph). +- Uses `TypeCache` to reuse previously inferred top-level types. +- Uses `CachingImportResolver` so import expressions can consult cached types. + +Cache invalidation is dependency-aware through +`invalidate_type_cache_with_dependents`. + +## Configuration and Reconfiguration + +`ServerConfig` supports initialization options and live updates from +`workspace/didChangeConfiguration`. + +Important behavior in `on_did_change_configuration`: + +- Runtime-affecting changes (`jpath`, eval diagnostics mode, Tanka mode) trigger + runtime component rebuild. +- Runtime-affecting changes clear the shared type cache and refresh import graph + for all tracked files (open and closed-cache graph entries). +- Runtime changes and lint toggle changes both trigger diagnostic rescheduling + for open files. +- Code action policy updates (`codeActions.removeUnused`, + `codeActions.removeUnusedComments`) are applied immediately and do not require + runtime rebuild. + +## Configuration Surface + +`ServerConfig` (`crates/jrsonnet-lsp/src/config.rs`) includes: + +- `jpath` +- `ext_vars` +- `ext_code` +- `enable_eval_diagnostics` +- `enable_lint_diagnostics` +- `resolve_paths_with_tanka` +- `formatting` +- `code_actions` +- `log_level` + +`formatting` currently includes: + +- built-in formatter options (`indent`, `max_blank_lines`, string/comment + style, padding toggles) + +Configuration can arrive via initialization options or +`workspace/didChangeConfiguration` settings payloads. 
The update logic accepts +both flat and namespaced settings (`jsonnet`, `jsonnet-language-server`). + +## Timeline Test Harness + +The scenario harness is split into a dedicated crate plus fixture-driven tests: + +- `crates/jrsonnet-lsp-scenario/src/scenario.rs` +- `crates/jrsonnet-lsp-scenario/src/scenario_script/` +- `crates/jrsonnet-lsp-scenario/src/scenario_runner/` +- `crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs` +- `crates/jrsonnet-lsp/tests/scenarios/**/*.yaml` + +`jrsonnet-lsp-scenario` provides: + +- a typed executable timeline model +- a YAML DSL compiler (`parse_scenario_yaml`) +- an in-memory LSP runner with async diagnostics settle barriers +- rstest fixture helpers for file-based scenario discovery + +The YAML DSL is marker-driven: + +- inline source markers define reusable positions/ranges in file content: + - `[[name:text]]` for named ranges + - `((name:|x))`, `((name:x|))`, `((name:|))` for named cursor positions +- request positions use marker names (`at: markerName`) +- request ranges use marker names (`range: markerName`) or shorthand + (`at` + `text`/`len`) +- request/expect pairs use optional aliases (`as` / `request`) instead of raw + numeric IDs +- `diagnosticsSettled` defaults are `timeout_ms: 1000` and `idle_ms: 50` +- `create.files` keeps scenarios self-contained and path-relative + +The compiled timeline supports cross-file edits and notifications +(`writeFile`, `deleteFile`, `notifyWatchedFiles`) and request/response +assertions for all implemented handlers. 
+ +### Scenario Script Examples + +#### Diagnostics warning + +```yaml +steps: + - step: create + files: + main.jsonnet: local [[unused:x]] = 1; 42 + - step: diagnosticsSettled + - step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: unused + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable +``` + +#### Type error diagnostic + +```yaml +steps: + - step: create + files: + main.jsonnet: | + local x = 1; + [[badAccess:x.foo]] + - step: diagnosticsSettled + - step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: badAccess + text: x.foo + severity: error + message: field access on non-object type +``` + +#### Code action request/expect + +```yaml +steps: + - step: create + files: + main.jsonnet: ((root:|))local [[unused:x]] = 1; 42 + - step: requestCodeAction + as: fixUnused + file: main.jsonnet + at: root + text: local x = 1; 42 + diagnostics: + - at: unused + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + - step: expectCodeAction + request: fixUnused + result: + - title: Prefix `x` with `_` + kind: quickfix + isPreferred: true + diagnostics: + - at: unused + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + main.jsonnet: + - at: unused + text: x + replace: _x +``` + +## Execute Commands + +Advertised commands: + +- `jrsonnet.evalFile` +- Custom requests: + - `jrsonnet/evalFile` + - `jrsonnet/evalExpression` + - `jrsonnet/findTransitiveImporters` + +Unknown command IDs are returned as explicit LSP `InvalidParams` errors. + +## Concurrency Strategy + +Concurrency is intentionally split: + +- Fast protocol/state transitions stay on the main loop thread. +- Heavier read/analysis operations run in Rayon jobs. +- Diagnostics run on a dedicated worker thread with debouncing. +- Shared mutable structures use `Arc` + `RwLock`/concurrent maps where needed. 
+ +This balance keeps latency low for editor interactions while preserving +cross-file correctness. diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md new file mode 100644 index 00000000..5b9d8b8c --- /dev/null +++ b/docs/lsp/HANDLERS.md @@ -0,0 +1,375 @@ +# jrsonnet-lsp Handler Guide + +This document describes current LSP request handling across +`crates/jrsonnet-lsp` and `crates/jrsonnet-lsp-handlers`. + +## Scope and Ownership + +- `jrsonnet-lsp` owns transport, method dispatch, async orchestration, and + diagnostics publishing. +- `jrsonnet-lsp-handlers` owns most feature logic for request methods. +- `jrsonnet-lsp` also owns diagnostics composition + (`crates/jrsonnet-lsp/src/handlers/diagnostics.rs`) and execute-command + orchestration (`crates/jrsonnet-lsp/src/server/async_requests.rs`). + +## Module Inventory + +Public handler modules in `crates/jrsonnet-lsp-handlers/src`: + +- `code_action.rs` +- `code_lens.rs` +- `completion/` +- `definition.rs` +- `document_highlight.rs` +- `formatting.rs` +- `hover.rs` +- `inlay_hint.rs` +- `references.rs` +- `rename.rs` +- `semantic_tokens.rs` +- `signature_help.rs` +- `symbols.rs` + +## Request Method Matrix + +Current request routing in `crates/jrsonnet-lsp/src/server.rs`: + +| LSP method | Server entry point | Implementation owner | Execution model | Uses `TypeAnalysis` | +| ----------------------------------- | ---------------------------------- | ------------------------------------------------- | --------------- | ------------------- | +| `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | +| `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import graph resolution | async | no | +| `textDocument/typeDefinition` | async (`goto_type_definition`) | mixed: handlers + server import graph resolution | async | no | +| `textDocument/declaration` | async context (`goto_declaration`) | mixed: handlers + server import graph 
resolution | async | no | +| `textDocument/implementation` | async (`goto_implementation`) | mixed: handlers + server import graph resolution | async | no | +| `textDocument/hover` | async context (`hover`) | handlers crate (`hover`) | async | yes | +| `textDocument/documentHighlight` | `on_document_highlight` | handlers crate (`document_highlights`) | sync | no | +| `textDocument/inlayHint` | async context (`inlay_hints`) | handlers crate (`inlay_hints`) | async | yes | +| `textDocument/codeAction` | `on_code_action` | handlers crate (`code_actions`) | sync | no | +| `textDocument/completion` | async context (`completion`) | handlers crate (`completion_with_import_roots`) | async | yes | +| `textDocument/signatureHelp` | `on_signature_help` | handlers crate (`signature_help`) | sync | no | +| `textDocument/formatting` | `on_formatting` | handlers crate (`format_document_with_config`) | sync | no | +| `textDocument/rangeFormatting` | `on_range_formatting` | handlers crate (`format_document_range_with_config`) | sync | no | +| `textDocument/references` | async context (`references`) | mixed: handlers + server import graph merge | async | no | +| `workspace/symbol` | async context (`workspace_symbol`) | handlers crate (`workspace_symbols_for_document`) | async | no | +| `textDocument/prepareRename` | `on_prepare_rename` | handlers crate (`prepare_rename`) | sync | no | +| `textDocument/rename` | async context (`rename`) | handlers crate (`rename_cross_file`) | async | no | +| `textDocument/semanticTokens/full` | `on_semantic_tokens_full` | handlers crate (`semantic_tokens`) | sync | no | +| `textDocument/semanticTokens/range` | `on_semantic_tokens_range` | handlers crate (`semantic_tokens_range`) | sync | no | +| `textDocument/codeLens` | async context (`code_lens`) | handlers crate (`code_lens`) | async | yes | +| `codeLens/resolve` | `resolve_code_lens` | handlers crate (`resolve_code_lens`) | sync | no | +| `workspace/executeCommand` | async context 
(`execute_command`) | server async context | async | no | +| `shutdown` | direct in `handle_request` | server | sync | no | + +Async requests are sent back over the server's async response channel after +worker completion. All request handlers enter through the `InflightRequests` +protocol boundary, which tracks pending IDs, uses typed request handles for +method-specific responses, and supports request cancellation. + +## Handler Details + +### Code Action + +File: `crates/jrsonnet-lsp-handlers/src/code_action.rs` + +- Produces quick-fix and source fix-all actions from diagnostic context. +- Current unused-variable actions include: + `Prefix with _`, `Remove unused binding `, and + `Remove all unused bindings`. +- Remove-unused edits support: + top-level `local` statements (single and multi-binding) and object-local + members in object bodies. +- `CodeActionConfig.remove_unused` supports policy modes: + `all`, `importBindings`, and `nonImportBindings`. +- `CodeActionConfig.remove_unused_comments` supports comment retention modes: + `none`, `above`, `below`, and `all`. +- Uses current document plus selected range and diagnostics from request params. +- Returned through sync `textDocument/codeAction`. + +### Code Lens + +File: `crates/jrsonnet-lsp-handlers/src/code_lens.rs` + +`CodeLensConfig` supports: + +- `show_types` + +Current server path builds `CodeLensConfig::all()` in async context and passes +computed `TypeAnalysis`. + +`codeLens/resolve` is a synchronous pass-through over +`handlers::resolve_code_lens`, which currently preserves lens payloads. + +Lens categories: + +- inferred type lenses for selected bindings + +The server augments these handler lenses with a custom-operation evaluate lens +(`jrsonnet.evalFile`) so generic clients can run file evaluation via the code +lens execute-command bridge. 
+
+### Completion
+
+Files:
+
+- `completion/mod.rs`
+- `completion/stdlib.rs`
+- `completion/locals.rs`
+- `completion/fields.rs`
+- `completion/imports.rs`
+- `completion/helpers.rs`
+
+Completion sources include:
+
+- stdlib functions/docs from `jrsonnet-std-sig` (surfaced through
+  `jrsonnet-lsp-stdlib`)
+- in-scope locals
+- object fields from inferred types
+- import paths using file path + configured import roots
+- object-context keywords (`$`, `self`, `super`)
+- non-identifier object fields are emitted with bracket-form text edits (for
+  example `obj.` + `my-field` completion inserts `obj["my-field"]`)
+
+Server capabilities advertise `.` as trigger. Other completion contexts can
+still return items on explicit completion requests.
+
+Stdlib metadata coverage is enforced by tests in `jrsonnet-std-sig` against:
+
+- official Jsonnet stdlib function names (`data/official_functions.txt`)
+- rjsonnet stdlib function names (`data/rjsonnet_functions.txt`)
+- legacy jrsonnet stdlib names (`data/jrsonnet_legacy_functions.txt`)
+
+### Definition
+
+File: `crates/jrsonnet-lsp-handlers/src/definition.rs`
+
+Navigation is split into four related requests with different intent:
+
+- `textDocument/declaration`: where the symbol is introduced in the current
+  lexical scope (nearest visible binder).
+- `textDocument/definition`: where the symbol ultimately comes from. This can
+  follow local aliases and resolve to imports or imported fields, including
+  fields declared with object method syntax.
+- `textDocument/typeDefinition`: for Jsonnet, this follows the same target as
+  `definition` because symbols do not have separate nominal type declarations.
+- `textDocument/implementation`: the value/body expression behind the symbol.
+
+Rule of thumb for users:
+
+- Use `declaration` when you want "where was this name declared here?".
+- Use `definition` when you want "what does this name actually point to?".
+- Use `implementation` when you want "what expression computes this value?". + +Example: + +```jsonnet +local x = 1; +local y = x; +local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias +``` + +At `alias` on the last line: + +- `declaration` jumps to `local alias = ...`. +- `definition` jumps to the `foo` declaration in `lib.libsonnet`. +- `implementation` jumps to the right-hand side `lib.foo`. + +At `y` in `local y = x;`: + +- `declaration` jumps to `local y = ...`. +- `definition` jumps to `local x = ...`. +- `implementation` jumps to the right-hand side of `y` (`x`). + +Internally, the handler returns one of: + +- `Local(range)` +- `Import(path)` +- `ImportField { path, fields }` + +The async server context resolves import paths and nested field locations into +final LSP `Location` responses. + +### Document Highlight + +File: `crates/jrsonnet-lsp-handlers/src/document_highlight.rs` + +- Same-file symbol highlighting with scope-aware matching. + +### Formatting + +File: `crates/jrsonnet-lsp-handlers/src/formatting/mod.rs` + +- Uses the built-in formatter implementation. +- Returns a full-document replacement edit when formatting changes text. +- Range formatting returns edits only when formatter changes are entirely + contained in the requested range. +- Computes replacement ranges with UTF-16-aware position conversion. +- Returns `None` when formatting is unavailable or fails. + +### Hover + +File: `crates/jrsonnet-lsp-handlers/src/hover.rs` + +Hover combines: + +- inferred type information +- stdlib documentation/signatures from `jrsonnet-lsp-stdlib` (generated from + `jrsonnet-std-sig`) +- local definition context snippets +- definition-site fallback to bound value type when token-level inference is + `any` +- keyword/operator token docs for language primitives where available + (`jrsonnet-rowan-parser/src/syntax_semantics.rs`, via + `SyntaxKind::token_doc_markdown()`) + +Requires `TypeAnalysis` from async server context. 
+ +### Inlay Hint + +File: `crates/jrsonnet-lsp-handlers/src/inlay_hint.rs` + +- Produces type hints for locals and function-related positions from inferred + types. +- Respects requested visible range. + +### References + +File: `crates/jrsonnet-lsp-handlers/src/references.rs` + +- Same-file references from scope-aware search. +- Cross-file references by searching transitive importers. +- Async server context merges same-file and cross-file results. + +### Rename + +File: `crates/jrsonnet-lsp-handlers/src/rename.rs` + +- `prepare_rename` validates renameability at cursor. +- `rename_cross_file` builds `WorkspaceEdit` across current file and importer + graph where needed. +- New names are validated with `SymbolName`. + +### Semantic Tokens + +File: `crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs` + +- Produces encoded semantic tokens for full-document requests. +- Produces encoded semantic tokens for range requests as well. +- Token-class mapping for keyword/comment/string/number/operator tokens is + centralized in `jrsonnet-rowan-parser/src/syntax_semantics.rs` via + `SyntaxKind` semantic helpers. 
+ + + +#### Semantic Token Legend (Generated) + +Token types (`index`: `lsp_name`, usage): + +- `0`: `namespace` (namespace) +- `1`: `type` (unused) +- `2`: `class` (unused) +- `3`: `enum` (unused) +- `4`: `interface` (unused) +- `5`: `struct` (unused) +- `6`: `typeParameter` (unused) +- `7`: `parameter` (parameter) +- `8`: `variable` (variable) +- `9`: `property` (property) +- `10`: `enumMember` (unused) +- `11`: `event` (unused) +- `12`: `function` (function) +- `13`: `method` (method) +- `14`: `macro` (unused) +- `15`: `keyword` (keyword) +- `16`: `modifier` (unused) +- `17`: `comment` (comment) +- `18`: `string` (string) +- `19`: `number` (number) +- `20`: `regexp` (unused) +- `21`: `operator` (operator) + +Token modifiers (`bit`: `lsp_name`, usage): + +- `1 << 0`: `declaration` (declaration) +- `1 << 1`: `definition` (definition) +- `1 << 2`: `readonly` (readonly) +- `1 << 3`: `static` (static) +- `1 << 4`: `deprecated` (deprecated) +- `1 << 5`: `abstract` (abstract) +- `1 << 6`: `async` (async) +- `1 << 7`: `modification` (modification) +- `1 << 8`: `documentation` (documentation) +- `1 << 9`: `defaultLibrary` (default-library) + + +### Signature Help + +File: `crates/jrsonnet-lsp-handlers/src/signature_help.rs` + +- Trigger characters: `(` and `,`. +- Uses stdlib signatures and local function information. +- Computes active parameter index from call context. +- Emits parameter labels as structural `LabelOffsets` spans within the full + signature label (instead of plain string labels). + +### Symbols + +File: `crates/jrsonnet-lsp-handlers/src/symbols.rs` + +- `document_symbols` builds hierarchical document outline. +- `workspace_symbols_for_document` filters symbols by query for one document. +- Async server path parallelizes across tracked workspace files (import-graph + entries plus currently open documents), loading unopened files from disk via + the document manager. 
+- Server-side aggregation then ranks matches so exact names come first, then + prefix matches, then other substring matches. +- Workspace symbol responses are capped at `128` entries to keep results bounded + and responsive on large workspaces. + +## Async Request Context + +`AsyncRequestContext` in `crates/jrsonnet-lsp/src/server/async_requests.rs` +provides shared orchestration for async methods: + +- canonical path conversion and document lookup +- dependency-aware `TypeAnalysis` creation via `TypeProvider` +- import graph lookups for cross-file features +- config access (`jpath`, command behavior) +- import-target resolution via graph-backed entries + +Import-graph state is populated from open/closed document updates and from +startup workspace-root bootstrap indexing during server initialization. + +This keeps handler modules focused on feature logic while server code owns +runtime and cross-cutting orchestration. + +## Type Analysis Usage + +`TypeAnalysis` is currently required in these request paths: + +- hover +- inlay hints +- completion +- code lens + +These analyses are computed in async context before handler invocation. + +## Commands and Handler Interaction + +Advertised command IDs: + +- `jrsonnet.evalFile` +- Custom requests: + - `jrsonnet/evalFile` + - `jrsonnet/evalExpression` + - `jrsonnet/findTransitiveImporters` + +Unknown command IDs are rejected with an explicit LSP `InvalidParams` response +error. + +## Testing Strategy + +- Handler unit tests live with handler modules in + `crates/jrsonnet-lsp-handlers/src/*`. +- LSP integration and routing tests live in `crates/jrsonnet-lsp/tests`. diff --git a/docs/lsp/README.md b/docs/lsp/README.md new file mode 100644 index 00000000..d5dab068 --- /dev/null +++ b/docs/lsp/README.md @@ -0,0 +1,299 @@ +# jrsonnet-lsp + +Rustanka includes a Jsonnet Language Server (`jrsonnet-lsp`) that adds editor +features like navigation, completion, diagnostics, and formatting over LSP. 
This +README is a practical guide to what it supports and how to configure it. + +For internal architecture details, see: + +- `docs/lsp/ARCHITECTURE.md` +- `docs/lsp/HANDLERS.md` +- `docs/lsp/TYPE_SYSTEM.md` + +## Running the server + +The server speaks LSP over stdio: + +```bash +jrsonnet-lsp --log-level info +``` + +## Capabilities + +Advertised LSP features: + +- Text sync: open/close, incremental changes, save. +- Navigation: `definition`, `declaration`, `typeDefinition`, `implementation`. +- Info and discovery: `hover`, `documentSymbol`, `workspace/symbol`, + `documentHighlight`. +- Editing helpers: `completion` (trigger `.`), `signatureHelp` (triggers `(` and + `,`), `rename` (with `prepareRename`), `references`. +- In-editor metadata: `inlayHint`, `semanticTokens/full`, + `semanticTokens/range`, `codeLens` (+ `codeLens/resolve`). +- Actions: `codeAction` (`quickfix`, `source.fixAll`), `formatting`, + `rangeFormatting`. + +When the client supports dynamic watched-file registration, the server also +registers watchers for: + +- `**/*.jsonnet` +- `**/*.libsonnet` +- `**/*.json` + +## Custom Requests + +The server exposes custom methods for non-standard operations. `jrsonnet/evalFile` +is also exposed via the Evaluate code lens. + +### `jrsonnet/evalFile` + +Evaluate a Jsonnet file and return its resulting JSON value. + +Example request: + +```json +{ + "method": "jrsonnet/evalFile", + "params": { + "textDocument": { + "uri": "file:///a/b/main.jsonnet" + } + } +} +``` + +Example response: + +```json +42 +``` + +This operation is also exposed via the Evaluate code lens. + +### `jrsonnet/evalExpression` + +Evaluate an arbitrary Jsonnet expression and return its resulting JSON value. +Optionally provide a base document URI to resolve imports relative to a file. 
+ +Example request: + +```json +{ + "method": "jrsonnet/evalExpression", + "params": { + "expression": "(import \"lib.libsonnet\")(40)", + "baseDocument": { + "uri": "file:///a/b/main.jsonnet" + } + } +} +``` + +Example response: + +```json +42 +``` + +### `jrsonnet/findTransitiveImporters` + +Return all files that transitively import the requested document. + +Example request: + +```json +{ + "method": "jrsonnet/findTransitiveImporters", + "params": { + "textDocument": { + "uri": "file:///a/b/lib.libsonnet" + } + } +} +``` + +Example response: + +```json +{ + "file": "file:///a/b/lib.libsonnet", + "transitiveImporters": ["file:///a/b/main.jsonnet"] +} +``` + +## Configuration + +Configuration is accepted from: + +- `initialize.initializationOptions` +- `workspace/didChangeConfiguration` + +`didChangeConfiguration` can be either: + +- Flat settings object, or +- Nested under `jsonnet` or `jsonnet-language-server`. + +Top-level options: + +| Key | Type | Default | Accepted values and notes | +| --- | --- | --- | --- | +| `jpath` | `string[]` | `[]` | Import search paths (same idea as `jsonnet -J`). | +| `ext_vars` | `object` | `{}` | Map of external string vars. Aliases: `extVars`, `ext_vars`. | +| `ext_code` | `object` | `{}` | Map of external code vars. Aliases: `extCode`, `ext_code`. | +| `enable_eval_diagnostics` | `boolean` | `false` | Aliases: `enableEvalDiagnostics`, `eval`. | +| `enable_lint_diagnostics` | `boolean` | `false` | Aliases: `enableLintDiagnostics`, `lint`. | +| `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | +| `formatting` | `object` | `{}` | Formatting options, see below. | +| `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. 
| +| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off", "callArguments": "off", "comprehensions": "off", "destructuring": "off" }` | Alias: `inlayHints`. | +| `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. | + +### `code_actions` + +| Key | Default | Accepted values | +| ---------------------- | ------- | -------------------------------------------- | +| `removeUnused` | `all` | `all`, `importBindings`, `nonImportBindings` | +| `removeUnusedComments` | `none` | `none`, `above`, `below`, `all` | + +### `inlay_hints` + +| Key | Default | Accepted values | +| --- | --- | --- | +| `local` | `all` | `off`, `variables`, `functions`, `all` | +| `objectLocal` | `all` | `off`, `variables`, `functions`, `all` | +| `objectMembers` | `off` | `off`, `fields`, `methods`, `all` | +| `functionParameters` | `off` | `off`, `all` | +| `anonymousFunctionReturns` | `off` | `off`, `all` | +| `callArguments` | `off` | `off`, `all` | +| `comprehensions` | `off` | `off`, `all` | +| `destructuring` | `off` | `off`, `all` | + +Notes: + +- `comprehensions` controls hints for `for ... in ...` bindings. +- Destructured comprehension bindings (for example `for [a, b] in ...`) + emit per-name hints when both `comprehensions = "all"` and + `destructuring = "all"` are enabled. + +### `formatting` + +If a field is omitted, the formatter default is used. + +Range formatting is conservative: it returns edits only when formatter changes +are fully contained in the requested range. 
+ +Formatting fields: + +| Key | Type | Default when unset | Accepted values | +| ------------------------ | --------- | ----------------------------------------- | ---------------------------------------------------------------------------------- | +| `indent` | `number` | `0` (tab-based indentation) | non-negative integer | +| `max_blank_lines` | `number` | `2` | non-negative integer | +| `string_style` | `string` | `leave` | `double`, `single`, `leave` (also accepts `d`/`s`; other values behave as `leave`) | +| `comment_style` | `string` | `leave` | `hash`, `slash`, `leave` (also accepts `h`/`s`; other values behave as `leave`) | +| `pad_arrays` | `boolean` | `false` | `true`/`false` | +| `pad_objects` | `boolean` | `true` | `true`/`false` | +| `pretty_field_names` | `boolean` | `true` | `true`/`false` | + +Formatting keys also accept additional aliases: + +- camelCase: `maxBlankLines`, `stringStyle`, `commentStyle`, `padArrays`, + `padObjects`, `prettyFieldNames` +- legacy go-jsonnet style: `Indent`, `MaxBlankLines`, `StringStyle`, + `CommentStyle`, `PadArrays`, `PadObjects`, `PrettyFieldNames` + +## Example + +```json +{ + "jsonnet": { + "jpath": ["vendor", "lib"], + "enableEvalDiagnostics": false, + "enableLintDiagnostics": true, + "resolvePathsWithTanka": "auto", + "codeActions": { + "removeUnused": "all", + "removeUnusedComments": "none" + }, + "inlayHints": { + "local": "all", + "objectLocal": "all", + "objectMembers": "fields", + "functionParameters": "all", + "anonymousFunctionReturns": "all", + "callArguments": "all", + "comprehensions": "all", + "destructuring": "all" + }, + "formatting": { + "indent": 2, + "string_style": "leave" + } + } +} +``` + +### Neovim 0.11+ configuration + +Neovim 0.11+ uses built-in `vim.lsp.config(...)` to define/extend a server +config, then `vim.lsp.enable(...)` to activate it for matching buffers. 
+`nvim-lspconfig` is still useful for shipping server config files, but
+`require('lspconfig').*.setup{}` is deprecated in favor of this flow.
+
+```lua
+-- init.lua
+-- Practical Neovim 0.11+ setup.
+vim.lsp.config("jrsonnet_lsp", {
+  -- Neovim options most people set:
+  cmd = { "jrsonnet-lsp" },
+  filetypes = { "jsonnet", "libsonnet" },
+  root_markers = { "jsonnetfile.json", ".git" },
+  -- Optional custom root detection:
+  -- root_dir = function(bufnr, on_dir)
+  --   local root = vim.fs.root(bufnr, { "jsonnetfile.json", ".git" })
+  --   if root then
+  --     on_dir(root)
+  --   end
+  -- end,
+
+  -- jrsonnet-lsp initializationOptions:
+  -- init_options = {
+  --   jpath = {}, -- string[]
+  --   extVars = {}, -- map; aliases: extVars/ext_vars
+  --   extCode = {}, -- map; aliases: extCode/ext_code
+  --   enableEvalDiagnostics = false, -- boolean; aliases: enableEvalDiagnostics/eval
+  --   enableLintDiagnostics = false, -- boolean; aliases: enableLintDiagnostics/lint
+  --   resolvePathsWithTanka = "auto", -- "false"|"auto"|"true"; booleans also accepted; aliases: resolvePathsWithTanka/tankaMode
+  --   formatting = {
+  --     indent = 2, -- integer >= 0 | nil
+  --     max_blank_lines = 2, -- integer >= 0 | nil
+  --     string_style = "leave", -- "double"|"single"|"leave"|"d"|"s"|nil
+  --     comment_style = "leave", -- "hash"|"slash"|"leave"|"h"|"s"|nil
+  --     pad_arrays = false, -- boolean|nil
+  --     pad_objects = true, -- boolean|nil
+  --     pretty_field_names = true, -- boolean|nil
+  --   },
+  --   codeActions = {
+  --     removeUnused = "all", -- "all"|"importBindings"|"nonImportBindings"
+  --     removeUnusedComments = "none", -- "none"|"above"|"below"|"all"
+  --   },
+  --   inlayHints = {
+  --     -- "local" is a reserved word in Lua, so it must use bracket syntax as a key:
+  --     ["local"] = "all", -- "off"|"variables"|"functions"|"all"
+  --     objectLocal = "all", -- "off"|"variables"|"functions"|"all"
+  --     objectMembers = "off", -- "off"|"fields"|"methods"|"all"
+  --     functionParameters = "off", -- "off"|"all"
+  --     anonymousFunctionReturns = "off", -- "off"|"all"
+  --     callArguments = "off", -- "off"|"all"
+  --   
comprehensions = "off", -- "off"|"all" + -- destructuring = "off", -- "off"|"all" + -- }, + -- logLevel = nil, -- string|nil (for example: "error"|"warn"|"info"|"debug") + -- }, + + -- Same keys are also accepted via didChangeConfiguration. + -- `settings` can be flat, or nested under: + -- settings = { jsonnet = { ... } } + -- settings = { ["jsonnet-language-server"] = { ... } } +}) + +vim.lsp.enable("jrsonnet_lsp") +``` diff --git a/docs/lsp/TYPE_SYSTEM.md b/docs/lsp/TYPE_SYSTEM.md new file mode 100644 index 00000000..4b9490a7 --- /dev/null +++ b/docs/lsp/TYPE_SYSTEM.md @@ -0,0 +1,211 @@ +# jrsonnet-lsp Type System + +This document describes the current type-system architecture used by the LSP, +with emphasis on interned representation, store layering, type operations, and +cross-file usage. + +## Scope + +Core type-system code lives in `crates/jrsonnet-lsp-types`. + +Inference and analysis integration live in `crates/jrsonnet-lsp-inference`, +which uses these types and stores to produce `TypeAnalysis` for handlers, +completion, diagnostics, and refactoring features. + +## Design Goals + +- Fast equality checks (`Ty` ID compare). +- Memory efficiency via interning and deduplication. +- Shareable type state across files (`GlobalTyStore`). +- Efficient per-analysis mutation (`MutStore` + local types). +- Expressive enough for Jsonnet semantics (objects, tuples, unions, + intersections, bounded numbers, and function return relationships). + +## Core Representation + +## `Ty`: interned type handle + +`Ty` is a compact `u32` wrapper (`Copy`, `Eq`, `Hash`) that references interned +`TyData`. + +Important constants include: + +- `Ty::ANY` +- `Ty::NEVER` +- `Ty::NULL` +- `Ty::BOOL`, `Ty::TRUE`, `Ty::FALSE` +- `Ty::NUMBER` +- `Ty::STRING` +- `Ty::CHAR` + +Well-known IDs are reserved and stable across stores. 
+
+## Local/global distinction
+
+The top bit (`LOCAL_BIT`) distinguishes origin:
+
+- local type IDs for per-analysis local store entries
+- global type IDs for shared global store entries
+
+This enables cheap local construction during analysis and later merging into the
+global store.
+
+## `TyData`: structural type payload
+
+Representative variants include:
+
+- primitives: `Any`, `Never`, `Null`, `Bool`, `Number`, `String`, literals
+- collections: `Array`, `Tuple`
+- objects: `Object(ObjectData)`, `AttrsOf`
+- functions: `Function(FunctionData)`
+- type algebra: `Union(Vec<Ty>)`, `Sum(Vec<Ty>)`
+- constraints: `BoundedNumber(NumBounds)`, `TypeVar`
+
+## Store Architecture
+
+`jrsonnet-lsp-types` exposes three complementary stores.
+
+## `GlobalTyStore`
+
+- Thread-safe shared store (`Arc` + lock-protected intern tables).
+- Lives for server lifetime.
+- Used for cross-file type reuse and cache stability.
+
+## `LocalTyStore`
+
+- Per-analysis local store.
+- No cross-thread sharing requirement.
+- Cheap staging area for temporary type construction.
+
+## `MutStore`
+
+- Unified interface that reads global + local and interns into local as needed.
+- Used heavily by inference/analysis code.
+- Supports merging locals into global through substitution machinery
+  (`TySubst`).
+
+## Type Substitution and Merge
+
+When local analysis results must survive outside the local context, local IDs
+are remapped to global IDs. `subst.rs` and `TySubst` cover this remapping
+process.
+
+This keeps handler-visible and cache-visible types globally valid.
+
+## Structural Data Types
+
+## Object model
+
+`ObjectData` carries:
+
+- sorted field list of `(String, FieldDefInterned)`
+- `has_unknown` flag to model open vs closed objects
+
+Field defs include type, required-ness, and visibility (`:`, `::`, `:::`
+semantics).
+
+## Function model
+
+`FunctionData` carries params and return specification.
Params include:
+
+- name
+- type
+- default-value presence
+
+`ReturnSpec` supports fixed and relationship-based returns, including patterns
+such as "same as arg" and "array of arg", which are useful for stdlib function
+modeling.
+
+## Number bounds
+
+`NumBounds` stores optional min/max bounds and supports subset/intersection
+logic for narrowing and compatibility checks.
+
+## Core Type Operations
+
+`crates/jrsonnet-lsp-types/src/operations.rs` implements operation semantics.
+
+Key APIs:
+
+- `binary_op_result_ty`
+- `unary_op_result_ty`
+- `array_concat_ty`
+- `ty_and` (intersection/narrowing)
+- `ty_minus` (set subtraction from unions)
+- `ty_with_len`
+- `ty_with_min_len`
+- `ty_with_field`
+
+These operations are used in inference and lint/type-check diagnostics.
+
+## Unification and Subtyping
+
+`crates/jrsonnet-lsp-types/src/unification.rs` provides:
+
+- `unify_ty`
+- `is_subtype_ty`
+- `types_equivalent_ty`
+
+Key properties of this unifier:
+
+- variance-aware (`Covariant`, `Contravariant`)
+- structural matching for objects/functions/collections
+- detailed error reporting with `UnifyError` and path context
+- union mismatch reporting with per-variant error detail
+
+## Display and Diagnostics Formatting
+
+`display.rs` exposes formatting helpers (`DisplayTy`, `DisplayContext`) used to
+turn internal interned types into readable strings for hovers, inlay hints, and
+diagnostics.
+
+The display layer is intentionally separate from inference logic.
+ +## Integration in Inference + +`TypeAnalysis` in `crates/jrsonnet-lsp-inference/src/analysis.rs` stores: + +- immutable map from expression ranges to inferred `Ty` +- document root type +- an internal `RwLock` for query-time helpers + +`TypeAnalysis` exposes utilities used by handlers/checks: + +- type lookup by range/position +- field extraction and object checks +- indexable/callable/sliceable checks +- display and store-access helpers + +## Cross-file Analysis and Cache + +`TypeProvider` (`provider.rs`) ensures dependency-aware analysis order using +`ImportGraph`. + +`TypeCache` (`type_cache.rs`) stores top-level inferred type per file with +version checks and LRU behavior. `CachingImportResolver` reads from this cache +for import type resolution. + +This architecture allows import-aware inference without requiring full project +re-analysis on every request. + +## Flow Typing Integration + +Flow-sensitive narrowing logic is implemented in +`crates/jrsonnet-lsp-inference/src/flow.rs` and works with type-system +operations (`ty_and`, `ty_minus`, and related helpers). + +Stdlib predicate metadata (`jrsonnet-std-sig`, `flow_typing`) feeds into this +narrowing behavior so predicates such as `std.isNumber(x)` can refine branch +types. 
+ +## Why This Shape Works for the LSP + +The current design balances correctness and latency: + +- interned `Ty` handles keep comparisons and copies cheap +- local/global split reduces synchronization costs during inference +- explicit substitution maintains correctness when persisting local results +- unified operations/unification provide consistent behavior across handlers and + diagnostics +- dependency-aware cache and provider keep cross-file features fast enough for + interactive use diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/cycle.libsonnet b/test_fixtures/golden_envs/lazy_import_cycle_env/cycle.libsonnet new file mode 100644 index 00000000..6f36b8a6 --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/cycle.libsonnet @@ -0,0 +1,3 @@ +{ + deferred: (import 'lazy.libsonnet').deferred, +} diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/golden/default/ConfigMap-lazy-cycle.golden b/test_fixtures/golden_envs/lazy_import_cycle_env/golden/default/ConfigMap-lazy-cycle.golden new file mode 100644 index 00000000..568b3e75 --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/golden/default/ConfigMap-lazy-cycle.golden @@ -0,0 +1,7 @@ +apiVersion: v1 +data: + message: lazy-cycle-ok +kind: ConfigMap +metadata: + name: lazy-cycle + namespace: default diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/jsonnetfile.json b/test_fixtures/golden_envs/lazy_import_cycle_env/jsonnetfile.json new file mode 100644 index 00000000..4aeabd69 --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/jsonnetfile.json @@ -0,0 +1 @@ +{"version":1} diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/lazy.libsonnet b/test_fixtures/golden_envs/lazy_import_cycle_env/lazy.libsonnet new file mode 100644 index 00000000..a0edf9ec --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/lazy.libsonnet @@ -0,0 +1,4 @@ +{ + message: 'lazy-cycle-ok', + deferred: (import 'cycle.libsonnet').deferred, +} 
diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/main.jsonnet b/test_fixtures/golden_envs/lazy_import_cycle_env/main.jsonnet new file mode 100644 index 00000000..8584e65d --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/main.jsonnet @@ -0,0 +1,25 @@ +local lazy = import 'lazy.libsonnet'; + +{ + apiVersion: 'tanka.dev/v1alpha1', + kind: 'Environment', + metadata: { + name: 'lazy-import-cycle', + }, + spec: { + namespace: 'default', + }, + data: { + 'ConfigMap-lazy-cycle': { + apiVersion: 'v1', + kind: 'ConfigMap', + metadata: { + name: 'lazy-cycle', + namespace: 'default', + }, + data: { + message: lazy.message, + }, + }, + }, +} diff --git a/xtask/src/main.rs b/xtask/src/main.rs index e21533c6..88c6cb82 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -4,6 +4,29 @@ use xshell::{cmd, Shell}; mod sourcegen; +const LSP_LINT_PACKAGES: &[&str] = &[ + "-p", + "jrsonnet-lsp", + "-p", + "jrsonnet-lsp-check", + "-p", + "jrsonnet-lsp-document", + "-p", + "jrsonnet-lsp-handlers", + "-p", + "jrsonnet-lsp-import", + "-p", + "jrsonnet-lsp-inference", + "-p", + "jrsonnet-lsp-scenario", + "-p", + "jrsonnet-lsp-scope", + "-p", + "jrsonnet-lsp-stdlib", + "-p", + "jrsonnet-lsp-types", +]; + #[derive(Parser)] enum Opts { /// Generate files for rowan parser @@ -80,6 +103,14 @@ fn main() -> Result<()> { Opts::Lint { fix } => { let fmt_check = if fix { None } else { Some("--check") }; cmd!(sh, "cargo fmt {fmt_check...}").run()?; + let clippy_fix = if fix { Some("--fix") } else { None }; + let allow_dirty = if fix { Some("--allow-dirty") } else { None }; + let allow_staged = if fix { Some("--allow-staged") } else { None }; + cmd!( + sh, + "cargo clippy {LSP_LINT_PACKAGES...} --all-targets --no-deps {clippy_fix...} {allow_dirty...} {allow_staged...} -- -D warnings" + ) + .run()?; Ok(()) } Opts::TestCBindings { diff --git a/xtask/src/sourcegen/mod.rs b/xtask/src/sourcegen/mod.rs index 27737f7e..9fcda3bf 100644 --- a/xtask/src/sourcegen/mod.rs +++ 
b/xtask/src/sourcegen/mod.rs @@ -202,9 +202,15 @@ fn generate_nodes(kinds: &KindsSrc, grammar: &AstSrc) -> Result { quote!(impl ast::#trait_name for #name {}) }); + // Count how many fields of each type exist to handle cases like + // ExprBinary = lhs:Expr BinaryOperator rhs:Expr + // where we need to get the nth child of type Expr + let mut type_counts: std::collections::HashMap = + std::collections::HashMap::new(); let methods = node.fields.iter().map(|field| { let method_name = field.method_name(kinds); let ty = field.ty(); + let ty_str = ty.to_string(); if field.is_many() { quote! { @@ -225,9 +231,24 @@ fn generate_nodes(kinds: &KindsSrc, grammar: &AstSrc) -> Result { } } } else { - quote! { - pub fn #method_name(&self) -> Option<#ty> { - support::child(&self.syntax) + // Track which occurrence of this type this field is + let index = *type_counts.get(&ty_str).unwrap_or(&0); + type_counts.insert(ty_str, index + 1); + + if index == 0 { + // First field of this type - use simple child() + quote! { + pub fn #method_name(&self) -> Option<#ty> { + support::child(&self.syntax) + } + } + } else { + // Nth field of this type - use children().nth(N) + let index_lit = proc_macro2::Literal::usize_unsuffixed(index); + quote! { + pub fn #method_name(&self) -> Option<#ty> { + support::children::<#ty>(&self.syntax).nth(#index_lit) + } } } } @@ -535,7 +556,12 @@ pub fn escape_token_macro(token: &str) -> TokenStream { quote! { #c } } else if token.contains('$') { quote! { #token } + } else if token.chars().all(|c| c.is_ascii_alphabetic() || c == '_') { + // Keywords like "tailstrict", "local", etc. - use as identifier + let ident = format_ident!("{}", token); + quote! { #ident } } else { + // Punctuation tokens like "+", "==", etc. let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint)); quote! { #(#cs)* } }